Mirror of https://github.com/google/nomulus, synced 2026-02-09 06:20:29 +00:00
Remove CommitLog and MapReduce-related code (#1670)
@@ -1,104 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.ofy.CommitLogCheckpointRoot.loadRoot;
import static google.registry.testing.DatabaseHelper.persistResource;
import static google.registry.util.DateTimeUtils.START_OF_TIME;
import static org.joda.time.DateTimeZone.UTC;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.google.common.collect.ImmutableMap;
import google.registry.model.ofy.CommitLogCheckpoint;
import google.registry.model.ofy.CommitLogCheckpointRoot;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.CloudTasksHelper;
import google.registry.testing.CloudTasksHelper.TaskMatcher;
import google.registry.testing.FakeClock;
import google.registry.testing.TmOverrideExtension;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link CommitLogCheckpointAction}. */
public class CommitLogCheckpointActionTest {

  private static final String QUEUE_NAME = "export-commits";

  @RegisterExtension
  @Order(Order.DEFAULT - 1)
  TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();

  @RegisterExtension
  public final AppEngineExtension appEngine =
      AppEngineExtension.builder().withDatastoreAndCloudSql().withTaskQueue().build();

  private CommitLogCheckpointStrategy strategy = mock(CommitLogCheckpointStrategy.class);

  private DateTime now = DateTime.now(UTC);
  private CommitLogCheckpointAction task = new CommitLogCheckpointAction();
  private final CloudTasksHelper cloudTasksHelper = new CloudTasksHelper(new FakeClock(now));

  @BeforeEach
  void beforeEach() {
    task.strategy = strategy;
    task.cloudTasksUtils = cloudTasksHelper.getTestCloudTasksUtils();
    when(strategy.computeCheckpoint())
        .thenReturn(
            CommitLogCheckpoint.create(
                now, ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME)));
  }

  @Test
  void testRun_noCheckpointEverWritten_writesCheckpointAndEnqueuesTask() {
    task.run();
    cloudTasksHelper.assertTasksEnqueued(
        QUEUE_NAME,
        new TaskMatcher()
            .url(ExportCommitLogDiffAction.PATH)
            .param(ExportCommitLogDiffAction.LOWER_CHECKPOINT_TIME_PARAM, START_OF_TIME.toString())
            .param(ExportCommitLogDiffAction.UPPER_CHECKPOINT_TIME_PARAM, now.toString())
            .scheduleTime(now.plus(CommitLogCheckpointAction.ENQUEUE_DELAY_SECONDS)));
    assertThat(loadRoot().getLastWrittenTime()).isEqualTo(now);
  }

  @Test
  void testRun_checkpointWrittenBeforeNow_writesCheckpointAndEnqueuesTask() {
    DateTime oneMinuteAgo = now.minusMinutes(1);
    persistResource(CommitLogCheckpointRoot.create(oneMinuteAgo));
    task.run();
    cloudTasksHelper.assertTasksEnqueued(
        QUEUE_NAME,
        new TaskMatcher()
            .url(ExportCommitLogDiffAction.PATH)
            .param(ExportCommitLogDiffAction.LOWER_CHECKPOINT_TIME_PARAM, oneMinuteAgo.toString())
            .param(ExportCommitLogDiffAction.UPPER_CHECKPOINT_TIME_PARAM, now.toString())
            .scheduleTime(now.plus(CommitLogCheckpointAction.ENQUEUE_DELAY_SECONDS)));
    assertThat(loadRoot().getLastWrittenTime()).isEqualTo(now);
  }

  @Test
  void testRun_checkpointWrittenAfterNow_doesntOverwrite_orEnqueueTask() {
    DateTime oneMinuteFromNow = now.plusMinutes(1);
    persistResource(CommitLogCheckpointRoot.create(oneMinuteFromNow));
    task.run();
    cloudTasksHelper.assertNoTasksEnqueued(QUEUE_NAME);
    assertThat(loadRoot().getLastWrittenTime()).isEqualTo(oneMinuteFromNow);
  }
}
@@ -1,307 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.common.Cursor.CursorType.RDE_REPORT;
import static google.registry.model.ofy.CommitLogBucket.getBucketKey;
import static google.registry.testing.DatabaseHelper.createTld;
import static google.registry.util.DateTimeUtils.END_OF_TIME;
import static google.registry.util.DateTimeUtils.START_OF_TIME;

import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableMap;
import google.registry.model.common.Cursor;
import google.registry.model.ofy.CommitLogBucket;
import google.registry.model.ofy.CommitLogCheckpoint;
import google.registry.model.ofy.DatastoreTransactionManager;
import google.registry.model.ofy.Ofy;
import google.registry.model.tld.Registry;
import google.registry.persistence.transaction.TransactionManager;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.FakeClock;
import google.registry.testing.InjectExtension;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link CommitLogCheckpointStrategy}. */
public class CommitLogCheckpointStrategyTest {

  @RegisterExtension
  public final AppEngineExtension appEngine =
      AppEngineExtension.builder().withDatastoreAndCloudSql().build();

  @RegisterExtension public final InjectExtension inject = new InjectExtension();

  private final FakeClock clock = new FakeClock(DateTime.parse("2000-01-01TZ"));
  private final Ofy ofy = new Ofy(clock);
  private final TransactionManager tm = new DatastoreTransactionManager(ofy);
  private final CommitLogCheckpointStrategy strategy = new CommitLogCheckpointStrategy();

  /**
   * Supplier to inject into CommitLogBucket for doling out predictable bucket IDs.
   *
   * <p>If not overridden, the supplier returns 1 so that other saves won't hit an NPE (since even
   * if they use saveWithoutBackup() the transaction still selects a bucket key early).
   */
  private final FakeSupplier<Integer> fakeBucketIdSupplier = new FakeSupplier<>(1);

  /** Gross but necessary supplier that can be modified to return the desired value. */
  private static class FakeSupplier<T> implements Supplier<T> {
    /** Default value to return if 'value' is not set. */
    final T defaultValue;

    /** Set this value field to make the supplier return this value. */
    T value = null;

    FakeSupplier(T defaultValue) {
      this.defaultValue = defaultValue;
    }

    @Override
    public T get() {
      return value == null ? defaultValue : value;
    }
  }

  @BeforeEach
  void beforeEach() {
    strategy.clock = clock;
    strategy.ofy = ofy;

    // Need to inject clock into Ofy so that createTld() below will get the right time.
    inject.setStaticField(Ofy.class, "clock", clock);
    // Inject a fake bucket ID supplier so we can dole out specific bucket IDs to commit logs.
    inject.setStaticField(CommitLogBucket.class, "bucketIdSupplier", fakeBucketIdSupplier);

    // Create some fake TLDs to parent RegistryCursor test objects under.
    createTld("tld1");
    createTld("tld2");
    createTld("tld3");
    clock.advanceOneMilli();
  }

  @Test
  void test_readBucketTimestamps_noCommitLogs() {
    assertThat(strategy.readBucketTimestamps())
        .containsExactly(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME);
  }

  @Test
  void test_readBucketTimestamps_withSomeCommitLogs() {
    DateTime startTime = clock.nowUtc();
    writeCommitLogToBucket(1);
    clock.advanceOneMilli();
    writeCommitLogToBucket(2);
    assertThat(strategy.readBucketTimestamps())
        .containsExactly(1, startTime, 2, startTime.plusMillis(1), 3, START_OF_TIME);
  }

  @Test
  void test_readBucketTimestamps_againAfterUpdate_reflectsUpdate() {
    DateTime firstTime = clock.nowUtc();
    writeCommitLogToBucket(1);
    writeCommitLogToBucket(2);
    writeCommitLogToBucket(3);
    assertThat(strategy.readBucketTimestamps().values())
        .containsExactly(firstTime, firstTime, firstTime);
    clock.advanceOneMilli();
    writeCommitLogToBucket(1);
    DateTime secondTime = clock.nowUtc();
    assertThat(strategy.readBucketTimestamps())
        .containsExactly(1, secondTime, 2, firstTime, 3, firstTime);
  }

  @Test
  void test_readNewCommitLogsAndFindThreshold_noCommitsAtAll_returnsEndOfTime() {
    ImmutableMap<Integer, DateTime> bucketTimes =
        ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME);
    assertThat(strategy.readNewCommitLogsAndFindThreshold(bucketTimes)).isEqualTo(END_OF_TIME);
  }

  @Test
  void test_readNewCommitLogsAndFindThreshold_noNewCommits_returnsEndOfTime() {
    DateTime now = clock.nowUtc();
    writeCommitLogToBucket(1);
    clock.advanceOneMilli();
    writeCommitLogToBucket(2);
    clock.advanceOneMilli();
    writeCommitLogToBucket(3);
    ImmutableMap<Integer, DateTime> bucketTimes =
        ImmutableMap.of(1, now, 2, now.plusMillis(1), 3, now.plusMillis(2));
    assertThat(strategy.readNewCommitLogsAndFindThreshold(bucketTimes)).isEqualTo(END_OF_TIME);
  }

  @Test
  void test_readNewCommitLogsAndFindThreshold_tiedNewCommits_returnsCommitTimeMinusOne() {
    DateTime now = clock.nowUtc();
    writeCommitLogToBucket(1);
    writeCommitLogToBucket(2);
    writeCommitLogToBucket(3);
    assertThat(strategy.readNewCommitLogsAndFindThreshold(
            ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME)))
        .isEqualTo(now.minusMillis(1));
  }

  @Test
  void test_readNewCommitLogsAndFindThreshold_someNewCommits_returnsEarliestTimeMinusOne() {
    DateTime now = clock.nowUtc();
    writeCommitLogToBucket(1); // 1A
    writeCommitLogToBucket(2); // 2A
    writeCommitLogToBucket(3); // 3A
    clock.advanceBy(Duration.millis(5));
    writeCommitLogToBucket(1); // 1B
    writeCommitLogToBucket(2); // 2B
    writeCommitLogToBucket(3); // 3B
    clock.advanceBy(Duration.millis(5));
    writeCommitLogToBucket(1); // 1C
    writeCommitLogToBucket(2); // 2C
    writeCommitLogToBucket(3); // 3C
    // First pass times: 1 at T0, 2 at T+5, 3 at T+10.
    // Commits 1A, 2B, 3C are the commits seen in the first pass.
    // Commits 2A, 3A, 3B are all old prior commits that should be ignored.
    // Commit 1B is the first new commit for bucket 1, at T+5.
    // Commit 1C is the second new commit for bucket 1, at T+10, and should be ignored.
    // Commit 2C is the first new commit for bucket 2, at T+10.
    // Since 1B as a new commit is older than 1C, T+5 is the oldest new commit time.
    // Therefore, expect T+4 as the threshold time.
    assertThat(strategy.readNewCommitLogsAndFindThreshold(
            ImmutableMap.of(1, now, 2, now.plusMillis(5), 3, now.plusMillis(10))))
        .isEqualTo(now.plusMillis(4));
  }

  @Test
  void test_readNewCommitLogsAndFindThreshold_commitsAtBucketTimes() {
    DateTime now = clock.nowUtc();
    ImmutableMap<Integer, DateTime> bucketTimes =
        ImmutableMap.of(1, now.minusMillis(1), 2, now, 3, now.plusMillis(1));
    assertThat(strategy.readNewCommitLogsAndFindThreshold(bucketTimes)).isEqualTo(END_OF_TIME);
  }

  @Test
  void test_computeBucketCheckpointTimes_earlyThreshold_setsEverythingToThreshold() {
    DateTime now = clock.nowUtc();
    ImmutableMap<Integer, DateTime> bucketTimes =
        ImmutableMap.of(1, now.minusMillis(1), 2, now, 3, now.plusMillis(1));
    assertThat(strategy.computeBucketCheckpointTimes(bucketTimes, now.minusMillis(2)).values())
        .containsExactly(now.minusMillis(2), now.minusMillis(2), now.minusMillis(2));
  }

  @Test
  void test_computeBucketCheckpointTimes_middleThreshold_clampsToThreshold() {
    DateTime now = clock.nowUtc();
    ImmutableMap<Integer, DateTime> bucketTimes =
        ImmutableMap.of(1, now.minusMillis(1), 2, now, 3, now.plusMillis(1));
    assertThat(strategy.computeBucketCheckpointTimes(bucketTimes, now))
        .containsExactly(1, now.minusMillis(1), 2, now, 3, now);
  }

  @Test
  void test_computeBucketCheckpointTimes_lateThreshold_leavesBucketTimesAsIs() {
    DateTime now = clock.nowUtc();
    ImmutableMap<Integer, DateTime> bucketTimes =
        ImmutableMap.of(1, now.minusMillis(1), 2, now, 3, now.plusMillis(1));
    assertThat(strategy.computeBucketCheckpointTimes(bucketTimes, now.plusMillis(2)))
        .isEqualTo(bucketTimes);
  }

  @Test
  void test_computeCheckpoint_noCommitsAtAll_bucketCheckpointTimesAreStartOfTime() {
    assertThat(strategy.computeCheckpoint())
        .isEqualTo(CommitLogCheckpoint.create(
            clock.nowUtc(),
            ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME)));
  }

  @Test
  void test_computeCheckpoint_noNewCommitLogs_bucketCheckpointTimesAreBucketTimes() {
    DateTime now = clock.nowUtc();
    writeCommitLogToBucket(1);
    clock.advanceOneMilli();
    writeCommitLogToBucket(2);
    clock.advanceOneMilli();
    writeCommitLogToBucket(3);
    clock.advanceOneMilli();
    DateTime checkpointTime = clock.nowUtc();
    assertThat(strategy.computeCheckpoint())
        .isEqualTo(CommitLogCheckpoint.create(
            checkpointTime,
            ImmutableMap.of(1, now, 2, now.plusMillis(1), 3, now.plusMillis(2))));
  }

  @Test
  void test_computeCheckpoint_someNewCommits_bucketCheckpointTimesAreClampedToThreshold() {
    DateTime now = clock.nowUtc();
    writeCommitLogToBucket(1); // 1A
    writeCommitLogToBucket(2); // 2A
    writeCommitLogToBucket(3); // 3A
    clock.advanceBy(Duration.millis(5));
    writeCommitLogToBucket(1); // 1B
    writeCommitLogToBucket(2); // 2B
    writeCommitLogToBucket(3); // 3B
    clock.advanceBy(Duration.millis(5));
    writeCommitLogToBucket(1); // 1C
    writeCommitLogToBucket(2); // 2C
    writeCommitLogToBucket(3); // 3C

    // Set first pass times: 1 at T0, 2 at T+5, 3 at T+10.
    saveBucketWithLastWrittenTime(1, now);
    saveBucketWithLastWrittenTime(2, now.plusMillis(5));
    saveBucketWithLastWrittenTime(3, now.plusMillis(10));

    // Commits 1A, 2B, 3C are the commits seen in the first pass.
    // Commits 2A, 3A, 3B are all old prior commits that should be ignored.
    // Commit 1B is the first new commit for bucket 1, at T+5.
    // Commit 1C is the second new commit for bucket 1, at T+10, and should be ignored.
    // Commit 2C is the first new commit for bucket 2, at T+10.
    // Since 1B as a new commit is older than 1C, T+5 is the oldest new commit time.
    // Therefore, expect T+4 as the threshold time.
    DateTime threshold = now.plusMillis(4);

    // Advance clock before taking checkpoint.
    clock.advanceBy(Duration.millis(10));
    DateTime checkpointTime = clock.nowUtc();

    // Bucket checkpoint times should be clamped as expected.
    assertThat(strategy.computeCheckpoint())
        .isEqualTo(
            CommitLogCheckpoint.create(
                checkpointTime, ImmutableMap.of(1, now, 2, threshold, 3, threshold)));
  }

  private void writeCommitLogToBucket(final int bucketId) {
    fakeBucketIdSupplier.value = bucketId;
    tm.transact(
        () ->
            tm.put(
                Cursor.create(
                    RDE_REPORT, tm.getTransactionTime(), Registry.get("tld" + bucketId))));
    fakeBucketIdSupplier.value = null;
  }

  private void saveBucketWithLastWrittenTime(final int bucketId, final DateTime lastWrittenTime) {
    tm.transact(
        () ->
            ofy.saveWithoutBackup()
                .entity(
                    CommitLogBucket.loadBucket(getBucketKey(bucketId))
                        .asBuilder()
                        .setLastWrittenTime(lastWrittenTime)
                        .build()));
  }
}
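The comments and expectations in the tests above pin down the checkpoint math: the threshold is one millisecond before the oldest commit that is newer than its bucket's last-seen time, and each bucket's checkpoint time is then clamped to that threshold. The following is a minimal sketch of the clamping step only, reconstructed from the computeBucketCheckpointTimes test expectations rather than taken from the removed CommitLogCheckpointStrategy; it assumes the same Guava and Joda-Time imports as the test file above.

  // Sketch only: mirrors the behavior asserted in the computeBucketCheckpointTimes tests.
  // Any bucket time later than the threshold is pulled back to the threshold; earlier times stay.
  static ImmutableMap<Integer, DateTime> clampToThreshold(
      ImmutableMap<Integer, DateTime> bucketTimes, DateTime threshold) {
    ImmutableMap.Builder<Integer, DateTime> result = new ImmutableMap.Builder<>();
    bucketTimes.forEach(
        (bucket, time) -> result.put(bucket, time.isAfter(threshold) ? threshold : time));
    return result.build();
  }

For example, with bucket times (now-1ms, now, now+1ms) and a threshold of now, this yields (now-1ms, now, now), matching test_computeBucketCheckpointTimes_middleThreshold_clampsToThreshold.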
@@ -1,231 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Lists.partition;
import static google.registry.backup.BackupUtils.serializeEntity;
import static google.registry.model.ofy.CommitLogBucket.getBucketKey;
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
import static google.registry.util.DateTimeUtils.START_OF_TIME;
import static google.registry.util.DateTimeUtils.isAtOrAfter;
import static java.util.Comparator.comparingLong;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Streams;
import com.googlecode.objectify.Key;
import google.registry.model.ImmutableObject;
import google.registry.model.ofy.CommitLogBucket;
import google.registry.model.ofy.CommitLogCheckpoint;
import google.registry.model.ofy.CommitLogCheckpointRoot;
import google.registry.model.ofy.CommitLogManifest;
import google.registry.model.ofy.CommitLogMutation;
import google.registry.util.Clock;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;
import org.joda.time.DateTime;

/**
 * Helpers for exporting the diff between two commit log checkpoints to a local file.
 *
 * <p>In production, CommitLogs are saved periodically by cron jobs. During each job, the {@link
 * CommitLogCheckpointAction} is invoked first to compute a {@link CommitLogCheckpoint} and persist
 * it in Datastore. Then the {@link ExportCommitLogDiffAction} is invoked to export the diffs
 * accumulated between the previous and current checkpoints to a file.
 *
 * <p>The {@link #computeCheckpoint(Clock)} method is copied with simplification from {@link
 * CommitLogCheckpointAction}, and the {@link #saveCommitLogs(String, CommitLogCheckpoint,
 * CommitLogCheckpoint)} method is copied with simplification from {@link
 * ExportCommitLogDiffAction}. We opted for copying instead of refactoring to reduce risk to
 * production code.
 */
public final class CommitLogExports {

  public static final String DIFF_FILE_PREFIX = "commit_diff_until_";

  private static final int EXPORT_DIFF_BATCH_SIZE = 100;

  private CommitLogExports() {}

  /**
   * Returns the next {@link CommitLogCheckpoint} for commit logs. Please refer to the class
   * javadoc for background.
   */
  public static CommitLogCheckpoint computeCheckpoint(Clock clock) {
    CommitLogCheckpointStrategy strategy = new CommitLogCheckpointStrategy();
    strategy.clock = clock;
    strategy.ofy = auditedOfy();

    CommitLogCheckpoint checkpoint = strategy.computeCheckpoint();
    tm().transact(
            () -> {
              DateTime lastWrittenTime = CommitLogCheckpointRoot.loadRoot().getLastWrittenTime();
              checkState(
                  checkpoint.getCheckpointTime().isAfter(lastWrittenTime),
                  "Newer checkpoint already written at time: %s",
                  lastWrittenTime);
              auditedOfy()
                  .saveWithoutBackup()
                  .entities(
                      checkpoint, CommitLogCheckpointRoot.create(checkpoint.getCheckpointTime()));
            });
    return checkpoint;
  }

  /**
   * Saves the incremental changes between {@code prevCheckpoint} and {@code checkpoint} and
   * returns the {@link File}. Please refer to class javadoc for background.
   */
  public static File saveCommitLogs(
      String commitLogDir,
      @Nullable CommitLogCheckpoint prevCheckpoint,
      CommitLogCheckpoint checkpoint) {
    checkArgument(
        prevCheckpoint == null
            || (isAtOrAfter(prevCheckpoint.getCheckpointTime(), START_OF_TIME)
                && prevCheckpoint.getCheckpointTime().isBefore(checkpoint.getCheckpointTime())),
        "Inversed checkpoint: prev is %s, current is %s.",
        Optional.ofNullable(prevCheckpoint)
            .map(CommitLogCheckpoint::getCheckpointTime)
            .map(DateTime::toString)
            .orElse("null"),
        checkpoint.getCheckpointTime().toString());

    // Load the keys of all the manifests to include in this diff.
    List<Key<CommitLogManifest>> sortedKeys = loadAllDiffKeys(prevCheckpoint, checkpoint);
    // Open an output stream to the local diff file.
    File commitLogFile =
        new File(commitLogDir + "/" + DIFF_FILE_PREFIX + checkpoint.getCheckpointTime());
    try (OutputStream commitLogStream =
        new BufferedOutputStream(new FileOutputStream(commitLogFile))) {
      // Export the upper checkpoint itself.
      serializeEntity(checkpoint, commitLogStream);
      // If there are no manifests to export, stop early, now that we've written out the file with
      // the checkpoint itself (which is needed for restores, even if it's empty).
      if (sortedKeys.isEmpty()) {
        return commitLogFile;
      }
      // Export in chunks, one per fixed batch of commit logs. While processing one batch,
      // asynchronously load the entities for the next one.
      List<List<Key<CommitLogManifest>>> keyChunks = partition(sortedKeys, EXPORT_DIFF_BATCH_SIZE);
      // Objectify's map return type is asynchronous. Calling .values() will block until it loads.
      Map<?, CommitLogManifest> nextChunkToExport = auditedOfy().load().keys(keyChunks.get(0));
      for (int i = 0; i < keyChunks.size(); i++) {
        // Force the async load to finish.
        Collection<CommitLogManifest> chunkValues = nextChunkToExport.values();
        // Since there is no hard bound on how much data this might be, take care not to let the
        // Objectify session cache fill up and potentially run out of memory. This is the only safe
        // point to do this since at this point there is no async load in progress.
        auditedOfy().clearSessionCache();
        // Kick off the next async load, which can happen in parallel to the current export.
        if (i + 1 < keyChunks.size()) {
          nextChunkToExport = auditedOfy().load().keys(keyChunks.get(i + 1));
        }
        exportChunk(commitLogStream, chunkValues);
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return commitLogFile;
  }

  /**
   * Loads all the diff keys, sorted in a transaction-consistent chronological order.
   *
   * @param lowerCheckpoint exclusive lower bound on keys in this diff, or null if no lower bound
   * @param upperCheckpoint inclusive upper bound on keys in this diff
   */
  private static ImmutableList<Key<CommitLogManifest>> loadAllDiffKeys(
      @Nullable final CommitLogCheckpoint lowerCheckpoint,
      final CommitLogCheckpoint upperCheckpoint) {
    // Fetch the keys (no data) between these checkpoints, and sort by timestamp. This ordering is
    // transaction-consistent by virtue of our checkpoint strategy and our customized Ofy; see
    // CommitLogCheckpointStrategy for the proof. We break ties by sorting on bucket ID to ensure
    // a deterministic order.
    return upperCheckpoint.getBucketTimestamps().keySet().stream()
        .flatMap(
            bucketNum ->
                Streams.stream(loadDiffKeysFromBucket(lowerCheckpoint, upperCheckpoint, bucketNum)))
        .sorted(
            comparingLong(Key<CommitLogManifest>::getId)
                .thenComparingLong(a -> a.getParent().getId()))
        .collect(toImmutableList());
  }

  /**
   * Loads the diff keys for one bucket.
   *
   * @param lowerCheckpoint exclusive lower bound on keys in this diff, or null if no lower bound
   * @param upperCheckpoint inclusive upper bound on keys in this diff
   * @param bucketNum the bucket to load diff keys from
   */
  private static Iterable<Key<CommitLogManifest>> loadDiffKeysFromBucket(
      @Nullable CommitLogCheckpoint lowerCheckpoint,
      CommitLogCheckpoint upperCheckpoint,
      int bucketNum) {
    // If no lower checkpoint exists, or if it exists but had no timestamp for this bucket number
    // (because the bucket count was increased between these checkpoints), then use START_OF_TIME
    // as the effective exclusive lower bound.
    DateTime lowerCheckpointBucketTime =
        firstNonNull(
            (lowerCheckpoint == null) ? null : lowerCheckpoint.getBucketTimestamps().get(bucketNum),
            START_OF_TIME);
    // Since START_OF_TIME=0 is not a valid id in a key, add 1 to both bounds. Then instead of
    // loading lowerBound < x <= upperBound, we can load lowerBound <= x < upperBound.
    DateTime lowerBound = lowerCheckpointBucketTime.plusMillis(1);
    DateTime upperBound = upperCheckpoint.getBucketTimestamps().get(bucketNum).plusMillis(1);
    // If the lower and upper bounds are equal, there can't be any results, so skip the query.
    if (lowerBound.equals(upperBound)) {
      return ImmutableSet.of();
    }
    Key<CommitLogBucket> bucketKey = getBucketKey(bucketNum);
    return auditedOfy()
        .load()
        .type(CommitLogManifest.class)
        .ancestor(bucketKey)
        .filterKey(">=", CommitLogManifest.createKey(bucketKey, lowerBound))
        .filterKey("<", CommitLogManifest.createKey(bucketKey, upperBound))
        .keys();
  }

  /** Writes a chunk's worth of manifests and associated mutations to the output stream. */
  private static void exportChunk(OutputStream gcsStream, Collection<CommitLogManifest> chunk)
      throws IOException {
    // Kickoff async loads for all the manifests in the chunk.
    ImmutableList.Builder<Iterable<? extends ImmutableObject>> entities =
        new ImmutableList.Builder<>();
    for (CommitLogManifest manifest : chunk) {
      entities.add(ImmutableList.of(manifest));
      entities.add(auditedOfy().load().type(CommitLogMutation.class).ancestor(manifest));
    }
    for (ImmutableObject entity : concat(entities.build())) {
      serializeEntity(entity, gcsStream);
    }
  }
}
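For orientation, the two public entry points above are meant to be called in sequence by an export job: first compute and persist a new checkpoint, then write the diff against the previous checkpoint to a local file. A hedged usage sketch follows; the surrounding driver, and the clock, previousCheckpoint, and exportDir variables, are illustrative and not part of the removed code.

  // Sketch only: how a caller might drive CommitLogExports, per the class javadoc above.
  // `clock`, `previousCheckpoint` (nullable), and `exportDir` are assumed to be supplied by the caller.
  CommitLogCheckpoint newCheckpoint = CommitLogExports.computeCheckpoint(clock);
  File diffFile =
      CommitLogExports.saveCommitLogs(
          exportDir.getAbsolutePath(), previousCheckpoint, newCheckpoint);
  // The file name is DIFF_FILE_PREFIX plus the checkpoint time, e.g. "commit_diff_until_2000-01-01T00:00:00.000Z".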
@@ -1,140 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.ofy.ObjectifyService.auditedOfy;

import com.google.common.collect.ImmutableList;
import google.registry.model.contact.ContactResource;
import google.registry.model.ofy.CommitLogManifest;
import google.registry.model.ofy.CommitLogMutation;
import google.registry.model.ofy.Ofy;
import google.registry.testing.DatabaseHelper;
import google.registry.testing.FakeClock;
import google.registry.testing.FakeResponse;
import google.registry.testing.InjectExtension;
import google.registry.testing.TmOverrideExtension;
import google.registry.testing.mapreduce.MapreduceTestCase;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link DeleteOldCommitLogsAction}. */
public class DeleteOldCommitLogsActionTest
    extends MapreduceTestCase<DeleteOldCommitLogsAction> {

  @RegisterExtension
  @Order(Order.DEFAULT - 1)
  TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();

  private final FakeClock clock = new FakeClock(DateTime.parse("2000-01-01TZ"));
  private final FakeResponse response = new FakeResponse();
  private ContactResource contact;

  @RegisterExtension public final InjectExtension inject = new InjectExtension();

  @BeforeEach
  void beforeEach() {
    inject.setStaticField(Ofy.class, "clock", clock);
    action = new DeleteOldCommitLogsAction();
    action.mrRunner = makeDefaultRunner();
    action.response = response;
    action.clock = clock;
    action.maxAge = Duration.standardDays(30);

    ContactResource contact = DatabaseHelper.persistActiveContact("TheRegistrar");
    clock.advanceBy(Duration.standardDays(1));
    DatabaseHelper.persistResourceWithCommitLog(contact);

    prepareData();
  }

  private void runMapreduce(Duration maxAge) throws Exception {
    action.maxAge = maxAge;
    action.run();
    executeTasksUntilEmpty("mapreduce");
    auditedOfy().clearSessionCache();
  }

  private void mutateContact(String email) {
    auditedOfy().clearSessionCache();
    ContactResource contact =
        auditedOfy()
            .load()
            .type(ContactResource.class)
            .first()
            .now()
            .asBuilder()
            .setEmailAddress(email)
            .build();
    DatabaseHelper.persistResourceWithCommitLog(contact);
  }

  private void prepareData() {
    for (int i = 0; i < 10; i++) {
      clock.advanceBy(Duration.standardDays(7));
      String email = String.format("pumpkin_%d@cat.test", i);
      mutateContact(email);
    }
    auditedOfy().clearSessionCache();

    contact = auditedOfy().load().type(ContactResource.class).first().now();

    // The following value might change if {@link CommitLogRevisionsTranslatorFactory} changes.
    assertThat(contact.getRevisions()).hasSize(6);

    // Before deleting the unneeded manifests - we have 11 of them (one for the first
    // creation, and 10 more for the mutateContacts)
    assertThat(auditedOfy().load().type(CommitLogManifest.class).count()).isEqualTo(11);
    // And each DatabaseHelper.persistResourceWithCommitLog creates 3 mutations
    assertThat(auditedOfy().load().type(CommitLogMutation.class).count()).isEqualTo(33);
  }

  private <T> ImmutableList<T> ofyLoadType(Class<T> clazz) {
    return ImmutableList.copyOf(auditedOfy().load().type(clazz).iterable());
  }

  /** Check that with very short maxAge, only the referenced elements remain. */
  @Test
  void test_shortMaxAge() throws Exception {
    runMapreduce(Duration.millis(1));

    assertThat(
            ImmutableList.copyOf(
                auditedOfy().load().type(CommitLogManifest.class).keys().iterable()))
        .containsExactlyElementsIn(contact.getRevisions().values());

    // And each DatabaseHelper.persistResourceWithCommitLog creates 3 mutations
    assertThat(ofyLoadType(CommitLogMutation.class)).hasSize(contact.getRevisions().size() * 3);
  }

  /** Check that with very long maxAge, all the elements remain. */
  @Test
  void test_longMaxAge() throws Exception {
    ImmutableList<CommitLogManifest> initialManifests = ofyLoadType(CommitLogManifest.class);
    ImmutableList<CommitLogMutation> initialMutations = ofyLoadType(CommitLogMutation.class);

    runMapreduce(Duration.standardDays(1000));

    assertThat(ofyLoadType(CommitLogManifest.class)).containsExactlyElementsIn(initialManifests);
    assertThat(ofyLoadType(CommitLogMutation.class)).containsExactlyElementsIn(initialMutations);
  }
}
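The two tests above bracket the deletion policy: with a tiny maxAge only manifests still referenced by a resource's revision history survive, while with a very long maxAge nothing is deleted. A rough sketch of that retention predicate, inferred from the assertions rather than taken from the removed mapreduce (the method name and the standalone parameters here are illustrative, and the snippet assumes the Key, DateTime, Duration, and java.util.Set types are imported):

  // Sketch only: retention rule implied by test_shortMaxAge and test_longMaxAge above.
  // A manifest is kept if it is newer than the cutoff or still referenced by a resource revision.
  static boolean shouldKeep(
      Key<CommitLogManifest> manifestKey,
      DateTime commitTime,
      DateTime now,
      Duration maxAge,
      Set<Key<CommitLogManifest>> referencedRevisions) {
    return commitTime.isAfter(now.minus(maxAge)) || referencedRevisions.contains(manifestKey);
  }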
@@ -1,87 +0,0 @@
// Copyright 2021 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.ofy.ObjectifyService.auditedOfy;

import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityTranslator;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Resources;
import com.google.storage.onestore.v3.OnestoreEntity.EntityProto;
import google.registry.model.ofy.CommitLogCheckpoint;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.DatastoreEntityExtension;
import java.io.InputStream;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

public class EntityImportsTest {

  @RegisterExtension
  @Order(value = 1)
  final DatastoreEntityExtension datastoreEntityExtension = new DatastoreEntityExtension();

  @RegisterExtension
  final AppEngineExtension appEngine =
      new AppEngineExtension.Builder().withDatastoreAndCloudSql().withoutCannedData().build();

  private DatastoreService datastoreService;

  @BeforeEach
  void beforeEach() {
    datastoreService = DatastoreServiceFactory.getDatastoreService();
  }

  @Test
  void importCommitLogs_keysFixed() throws Exception {
    // The input resource is a standard commit log file whose entities have "AppId_1" as the appId.
    // The key fixes can be verified by checking that the appId of an imported entity's key has
    // been updated to "test" (which is set by AppEngineExtension) and/or that after persistence
    // the imported entity can be loaded by Objectify.
    try (InputStream commitLogInputStream =
        Resources.getResource("google/registry/backup/commitlog.data").openStream()) {
      ImmutableList<Entity> entities =
          loadEntityProtos(commitLogInputStream).stream()
              .map(EntityImports::fixEntity)
              .map(EntityTranslator::createFromPb)
              .collect(ImmutableList.toImmutableList());
      // Verifies that the original appId has been overwritten.
      assertThat(entities.get(0).getKey().getAppId()).isEqualTo("test");
      datastoreService.put(entities);
      // Imported entity can be found by Ofy after appId conversion.
      assertThat(auditedOfy().load().type(CommitLogCheckpoint.class).count()).isGreaterThan(0);
    }
  }

  private static ImmutableList<EntityProto> loadEntityProtos(InputStream inputStream) {
    ImmutableList.Builder<EntityProto> protosBuilder = new ImmutableList.Builder<>();
    while (true) {
      EntityProto proto = new EntityProto();
      boolean parsed = proto.parseDelimitedFrom(inputStream);
      if (parsed && proto.isInitialized()) {
        protosBuilder.add(proto);
      } else {
        break;
      }
    }
    return protosBuilder.build();
  }
}
@@ -1,464 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static google.registry.backup.BackupUtils.GcsMetadataKeys.LOWER_BOUND_CHECKPOINT;
import static google.registry.backup.BackupUtils.GcsMetadataKeys.NUM_TRANSACTIONS;
import static google.registry.backup.BackupUtils.GcsMetadataKeys.UPPER_BOUND_CHECKPOINT;
import static google.registry.backup.BackupUtils.deserializeEntities;
import static google.registry.testing.DatabaseHelper.persistResource;
import static google.registry.util.DateTimeUtils.START_OF_TIME;
import static org.joda.time.DateTimeZone.UTC;

import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper;
import com.google.common.collect.ImmutableMap;
import com.googlecode.objectify.Key;
import google.registry.gcs.GcsUtils;
import google.registry.model.ImmutableObject;
import google.registry.model.ofy.CommitLogBucket;
import google.registry.model.ofy.CommitLogCheckpoint;
import google.registry.model.ofy.CommitLogManifest;
import google.registry.model.ofy.CommitLogMutation;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.TestObject;
import google.registry.testing.TmOverrideExtension;
import java.util.List;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link ExportCommitLogDiffAction}. */
public class ExportCommitLogDiffActionTest {

  @RegisterExtension
  @Order(Order.DEFAULT - 1)
  TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();

  @RegisterExtension
  public final AppEngineExtension appEngine =
      AppEngineExtension.builder()
          .withDatastoreAndCloudSql()
          .withOfyTestEntities(TestObject.class)
          .build();

  private final GcsUtils gcsUtils = new GcsUtils(LocalStorageHelper.getOptions());

  private final DateTime now = DateTime.now(UTC);
  private final DateTime oneMinuteAgo = now.minusMinutes(1);

  private final ExportCommitLogDiffAction task = new ExportCommitLogDiffAction();

  @BeforeEach
  void beforeEach() {
    task.gcsUtils = gcsUtils;
    task.gcsBucket = "gcs bucket";
    task.batchSize = 5;
  }

  @Test
  void testRun_noCommitHistory_onlyUpperCheckpointExported() throws Exception {
    task.lowerCheckpointTime = oneMinuteAgo;
    task.upperCheckpointTime = now;

    persistResource(CommitLogCheckpoint.create(
        oneMinuteAgo,
        ImmutableMap.of(1, oneMinuteAgo, 2, oneMinuteAgo, 3, oneMinuteAgo)));
    CommitLogCheckpoint upperCheckpoint = persistResource(CommitLogCheckpoint.create(
        now,
        ImmutableMap.of(1, now, 2, now, 3, now)));

    // Don't persist any manifests or mutations.

    task.run();

    BlobId expectedFilename = BlobId.of("gcs bucket", "commit_diff_until_" + now);
    assertWithMessage("GCS file not found: " + expectedFilename)
        .that(gcsUtils.existsAndNotEmpty(expectedFilename))
        .isTrue();
    assertThat(gcsUtils.getMetadata(expectedFilename))
        .containsExactly(
            LOWER_BOUND_CHECKPOINT,
            oneMinuteAgo.toString(),
            UPPER_BOUND_CHECKPOINT,
            now.toString(),
            NUM_TRANSACTIONS,
            "0");
    List<ImmutableObject> exported = deserializeEntities(gcsUtils.readBytesFrom(expectedFilename));
    assertThat(exported).containsExactly(upperCheckpoint);
  }

  @Test
  void testRun_regularCommitHistory_exportsCorrectCheckpointDiff() throws Exception {
    task.lowerCheckpointTime = oneMinuteAgo;
    task.upperCheckpointTime = now;

    // Persist the lower and upper checkpoints, with 3 buckets each and staggered times. We respect
    // the real invariant that the time for bucket n in the lower checkpoint is <= the time for
    // that bucket in the upper.
    persistResource(CommitLogCheckpoint.create(
        oneMinuteAgo,
        ImmutableMap.of(
            1, oneMinuteAgo,
            2, oneMinuteAgo.minusDays(1),
            3, oneMinuteAgo.minusDays(2))));
    CommitLogCheckpoint upperCheckpoint = persistResource(CommitLogCheckpoint.create(
        now,
        ImmutableMap.of(
            1, now,
            2, now.minusDays(1),
            3, oneMinuteAgo.minusDays(2)))); // Note that this matches the lower bound.

    // Persist some fake commit log manifests.
    // These shouldn't be in the diff because the lower bound is exclusive.
    persistManifestAndMutation(1, oneMinuteAgo);
    persistManifestAndMutation(2, oneMinuteAgo.minusDays(1));
    persistManifestAndMutation(3, oneMinuteAgo.minusDays(2)); // Even though it's == upper bound.
    // These shouldn't be in the diff because they are above the upper bound.
    persistManifestAndMutation(1, now.plusMillis(1));
    persistManifestAndMutation(2, now.minusDays(1).plusMillis(1));
    persistManifestAndMutation(3, oneMinuteAgo.minusDays(2).plusMillis(1));
    // These should be in the diff because they are between the bounds. (Not possible for bucket 3.)
    persistManifestAndMutation(1, now.minusMillis(1));
    persistManifestAndMutation(2, now.minusDays(1).minusMillis(1));
    // These should be in the diff because they are at the upper bound. (Not possible for bucket 3.)
    persistManifestAndMutation(1, now);
    persistManifestAndMutation(2, now.minusDays(1));

    task.run();

    BlobId expectedFilename = BlobId.of("gcs bucket", "commit_diff_until_" + now);
    assertWithMessage("GCS file not found: " + expectedFilename)
        .that(gcsUtils.existsAndNotEmpty(expectedFilename))
        .isTrue();
    assertThat(gcsUtils.getMetadata(expectedFilename))
        .containsExactly(
            LOWER_BOUND_CHECKPOINT,
            oneMinuteAgo.toString(),
            UPPER_BOUND_CHECKPOINT,
            now.toString(),
            NUM_TRANSACTIONS,
            "4");
    List<ImmutableObject> exported = deserializeEntities(gcsUtils.readBytesFrom(expectedFilename));
    assertThat(exported.get(0)).isEqualTo(upperCheckpoint);
    // We expect these manifests, in time order, with matching mutations.
    CommitLogManifest manifest1 = createManifest(2, now.minusDays(1).minusMillis(1));
    CommitLogManifest manifest2 = createManifest(2, now.minusDays(1));
    CommitLogManifest manifest3 = createManifest(1, now.minusMillis(1));
    CommitLogManifest manifest4 = createManifest(1, now);
    assertThat(exported).containsExactly(
        upperCheckpoint,
        manifest1,
        createMutation(manifest1),
        manifest2,
        createMutation(manifest2),
        manifest3,
        createMutation(manifest3),
        manifest4,
        createMutation(manifest4))
        .inOrder();
  }

  @Test
  void testRun_simultaneousTransactions_bothExported() throws Exception {
    task.lowerCheckpointTime = oneMinuteAgo;
    task.upperCheckpointTime = now;

    persistResource(CommitLogCheckpoint.create(
        oneMinuteAgo,
        ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME)));
    CommitLogCheckpoint upperCheckpoint = persistResource(CommitLogCheckpoint.create(
        now,
        ImmutableMap.of(1, now, 2, now, 3, now)));

    // Persist some fake commit log manifests that are at the same time but in different buckets.
    persistManifestAndMutation(1, oneMinuteAgo);
    persistManifestAndMutation(2, oneMinuteAgo);
    persistManifestAndMutation(1, now);
    persistManifestAndMutation(2, now);

    task.run();

    BlobId expectedFilename = BlobId.of("gcs bucket", "commit_diff_until_" + now);
    assertWithMessage("GCS file not found: " + expectedFilename)
        .that(gcsUtils.existsAndNotEmpty(expectedFilename))
        .isTrue();
    assertThat(gcsUtils.getMetadata(expectedFilename))
        .containsExactly(
            LOWER_BOUND_CHECKPOINT,
            oneMinuteAgo.toString(),
            UPPER_BOUND_CHECKPOINT,
            now.toString(),
            NUM_TRANSACTIONS,
            "4");
    List<ImmutableObject> exported = deserializeEntities(gcsUtils.readBytesFrom(expectedFilename));
    assertThat(exported.get(0)).isEqualTo(upperCheckpoint);
    // We expect these manifests, in the order below, with matching mutations.
    CommitLogManifest manifest1 = createManifest(1, oneMinuteAgo);
    CommitLogManifest manifest2 = createManifest(2, oneMinuteAgo);
    CommitLogManifest manifest3 = createManifest(1, now);
    CommitLogManifest manifest4 = createManifest(2, now);
    assertThat(exported).containsExactly(
        upperCheckpoint,
        manifest1,
        createMutation(manifest1),
        manifest2,
        createMutation(manifest2),
        manifest3,
        createMutation(manifest3),
        manifest4,
        createMutation(manifest4))
        .inOrder();
  }

  @Test
  void testRun_exportsAcrossMultipleBatches() throws Exception {
    task.batchSize = 2;
    task.lowerCheckpointTime = oneMinuteAgo;
    task.upperCheckpointTime = now;

    persistResource(CommitLogCheckpoint.create(
        oneMinuteAgo,
        ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME)));
    CommitLogCheckpoint upperCheckpoint = persistResource(CommitLogCheckpoint.create(
        now,
        ImmutableMap.of(1, now, 2, now, 3, now)));

    // Persist some fake commit log manifests.
    persistManifestAndMutation(1, oneMinuteAgo);
    persistManifestAndMutation(2, oneMinuteAgo);
    persistManifestAndMutation(3, oneMinuteAgo);
    persistManifestAndMutation(1, now);
    persistManifestAndMutation(2, now);
    persistManifestAndMutation(3, now);

    task.run();

    BlobId expectedFilename = BlobId.of("gcs bucket", "commit_diff_until_" + now);
    assertWithMessage("GCS file not found: " + expectedFilename)
        .that(gcsUtils.existsAndNotEmpty(expectedFilename))
        .isTrue();
    assertThat(gcsUtils.getMetadata(expectedFilename))
        .containsExactly(
            LOWER_BOUND_CHECKPOINT,
            oneMinuteAgo.toString(),
            UPPER_BOUND_CHECKPOINT,
            now.toString(),
            NUM_TRANSACTIONS,
            "6");
    List<ImmutableObject> exported = deserializeEntities(gcsUtils.readBytesFrom(expectedFilename));
    assertThat(exported.get(0)).isEqualTo(upperCheckpoint);
    // We expect these manifests, in the order below, with matching mutations.
    CommitLogManifest manifest1 = createManifest(1, oneMinuteAgo);
    CommitLogManifest manifest2 = createManifest(2, oneMinuteAgo);
    CommitLogManifest manifest3 = createManifest(3, oneMinuteAgo);
    CommitLogManifest manifest4 = createManifest(1, now);
    CommitLogManifest manifest5 = createManifest(2, now);
    CommitLogManifest manifest6 = createManifest(3, now);
    assertThat(exported).containsExactly(
        upperCheckpoint,
        manifest1,
        createMutation(manifest1),
        manifest2,
        createMutation(manifest2),
        manifest3,
        createMutation(manifest3),
        manifest4,
        createMutation(manifest4),
        manifest5,
        createMutation(manifest5),
        manifest6,
        createMutation(manifest6))
        .inOrder();
  }

  @Test
  void testRun_checkpointDiffWithNeverTouchedBuckets_exportsCorrectly() throws Exception {
    task.lowerCheckpointTime = oneMinuteAgo;
    task.upperCheckpointTime = now;

    persistResource(CommitLogCheckpoint.create(
        oneMinuteAgo,
        ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME)));
    CommitLogCheckpoint upperCheckpoint = persistResource(CommitLogCheckpoint.create(
        now,
        ImmutableMap.of(1, START_OF_TIME, 2, START_OF_TIME, 3, START_OF_TIME)));

    // Don't persist any commit log manifests; we're just checking that the task runs correctly
    // even if the upper timestamp contains START_OF_TIME values.

    task.run();

    BlobId expectedFilename = BlobId.of("gcs bucket", "commit_diff_until_" + now);
    assertWithMessage("GCS file not found: " + expectedFilename)
        .that(gcsUtils.existsAndNotEmpty(expectedFilename))
        .isTrue();
    assertThat(gcsUtils.getMetadata(expectedFilename))
        .containsExactly(
            LOWER_BOUND_CHECKPOINT,
            oneMinuteAgo.toString(),
            UPPER_BOUND_CHECKPOINT,
            now.toString(),
            NUM_TRANSACTIONS,
            "0");
    List<ImmutableObject> exported = deserializeEntities(gcsUtils.readBytesFrom(expectedFilename));
    // We expect no manifests or mutations, only the upper checkpoint.
    assertThat(exported).containsExactly(upperCheckpoint);
  }

  @Test
  void testRun_checkpointDiffWithNonExistentBucketTimestamps_exportsCorrectly() throws Exception {
    // Non-existent bucket timestamps can exist when the commit log bucket count was increased
    // recently.

    task.lowerCheckpointTime = oneMinuteAgo;
    task.upperCheckpointTime = now;

    // No lower checkpoint times are persisted for buckets 2 and 3 (simulating a recent increase in
    // the number of commit log buckets from 1 to 3), so all mutations on buckets 2 and 3, even
    // those older than the lower checkpoint, will be exported.
    persistResource(
        CommitLogCheckpoint.createForTest(oneMinuteAgo, ImmutableMap.of(1, oneMinuteAgo)));
    CommitLogCheckpoint upperCheckpoint =
        persistResource(
            CommitLogCheckpoint.create(
                now,
                ImmutableMap.of(
                    1, now,
                    2, now.minusDays(1),
                    3, oneMinuteAgo.minusDays(2))));

    // These shouldn't be in the diff because the lower bound is exclusive.
    persistManifestAndMutation(1, oneMinuteAgo);
    // These shouldn't be in the diff because they are above the upper bound.
    persistManifestAndMutation(1, now.plusMillis(1));
    persistManifestAndMutation(2, now.minusDays(1).plusMillis(1));
    persistManifestAndMutation(3, oneMinuteAgo.minusDays(2).plusMillis(1));
    // These should be in the diff because they happened after START_OF_TIME on buckets with
    // non-existent timestamps.
    persistManifestAndMutation(2, oneMinuteAgo.minusDays(1));
    persistManifestAndMutation(3, oneMinuteAgo.minusDays(2));
    // These should be in the diff because they are between the bounds.
    persistManifestAndMutation(1, now.minusMillis(1));
    persistManifestAndMutation(2, now.minusDays(1).minusMillis(1));
    // These should be in the diff because they are at the upper bound.
    persistManifestAndMutation(1, now);
    persistManifestAndMutation(2, now.minusDays(1));

    task.run();

    BlobId expectedFilename = BlobId.of("gcs bucket", "commit_diff_until_" + now);
    assertWithMessage("GCS file not found: " + expectedFilename)
        .that(gcsUtils.existsAndNotEmpty(expectedFilename))
        .isTrue();
    assertThat(gcsUtils.getMetadata(expectedFilename))
        .containsExactly(
            LOWER_BOUND_CHECKPOINT,
            oneMinuteAgo.toString(),
            UPPER_BOUND_CHECKPOINT,
            now.toString(),
            NUM_TRANSACTIONS,
            "6");
    List<ImmutableObject> exported = deserializeEntities(gcsUtils.readBytesFrom(expectedFilename));
    assertThat(exported.get(0)).isEqualTo(upperCheckpoint);
    // We expect these manifests, in time order, with matching mutations.
    CommitLogManifest manifest1 = createManifest(3, oneMinuteAgo.minusDays(2));
    CommitLogManifest manifest2 = createManifest(2, oneMinuteAgo.minusDays(1));
    CommitLogManifest manifest3 = createManifest(2, now.minusDays(1).minusMillis(1));
    CommitLogManifest manifest4 = createManifest(2, now.minusDays(1));
    CommitLogManifest manifest5 = createManifest(1, now.minusMillis(1));
    CommitLogManifest manifest6 = createManifest(1, now);
    assertThat(exported)
        .containsExactly(
            upperCheckpoint,
            manifest1,
            createMutation(manifest1),
            manifest2,
            createMutation(manifest2),
            manifest3,
            createMutation(manifest3),
            manifest4,
            createMutation(manifest4),
            manifest5,
            createMutation(manifest5),
            manifest6,
            createMutation(manifest6))
        .inOrder();
  }

  @Test
  void testRun_exportingFromStartOfTime_exportsAllCommits() throws Exception {
    task.lowerCheckpointTime = START_OF_TIME;
    task.upperCheckpointTime = now;

    CommitLogCheckpoint upperCheckpoint = persistResource(CommitLogCheckpoint.create(
        now,
        ImmutableMap.of(1, now, 2, now, 3, now)));

    // Persist some fake commit log manifests.
    persistManifestAndMutation(1, START_OF_TIME.plusMillis(1)); // Oldest possible manifest time.
    persistManifestAndMutation(2, oneMinuteAgo);
    persistManifestAndMutation(3, now);

    task.run();

    BlobId expectedFilename = BlobId.of("gcs bucket", "commit_diff_until_" + now);
    assertWithMessage("GCS file not found: " + expectedFilename)
        .that(gcsUtils.existsAndNotEmpty(expectedFilename))
        .isTrue();
    assertThat(gcsUtils.getMetadata(expectedFilename))
        .containsExactly(
            LOWER_BOUND_CHECKPOINT,
            START_OF_TIME.toString(),
            UPPER_BOUND_CHECKPOINT,
            now.toString(),
            NUM_TRANSACTIONS,
            "3");
    List<ImmutableObject> exported = deserializeEntities(gcsUtils.readBytesFrom(expectedFilename));
    assertThat(exported.get(0)).isEqualTo(upperCheckpoint);
    // We expect these manifests, in the order below, with matching mutations.
    CommitLogManifest manifest1 = createManifest(1, START_OF_TIME.plusMillis(1));
    CommitLogManifest manifest2 = createManifest(2, oneMinuteAgo);
    CommitLogManifest manifest3 = createManifest(3, now);
    assertThat(exported).containsExactly(
        upperCheckpoint,
        manifest1,
        createMutation(manifest1),
        manifest2,
        createMutation(manifest2),
        manifest3,
        createMutation(manifest3))
        .inOrder();
  }

  private CommitLogManifest createManifest(int bucketNum, DateTime commitTime) {
    return CommitLogManifest.create(CommitLogBucket.getBucketKey(bucketNum), commitTime, null);
  }

  private CommitLogMutation createMutation(CommitLogManifest manifest) {
    return CommitLogMutation.create(
        Key.create(manifest),
        TestObject.create(manifest.getCommitTime().toString()));
  }

  private void persistManifestAndMutation(int bucketNum, DateTime commitTime) {
    persistResource(
        createMutation(persistResource(createManifest(bucketNum, commitTime))));
  }
}
@@ -1,212 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.collect.Iterables.transform;
import static com.google.common.truth.Truth.assertThat;
import static google.registry.backup.BackupUtils.GcsMetadataKeys.LOWER_BOUND_CHECKPOINT;
import static google.registry.backup.ExportCommitLogDiffAction.DIFF_FILE_PREFIX;
import static google.registry.backup.GcsDiffFileLister.getCommitLogDiffPrefix;
import static org.joda.time.DateTimeZone.UTC;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.testing.TestLogHandler;
import com.google.common.util.concurrent.MoreExecutors;
import google.registry.gcs.GcsUtils;
import google.registry.testing.AppEngineExtension;
import google.registry.util.JdkLoggerConfig;
import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.logging.LogRecord;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link GcsDiffFileLister}. */
public class GcsDiffFileListerTest {

private static final String GCS_BUCKET = "gcs bucket";

private final DateTime now = DateTime.now(UTC);
private final GcsDiffFileLister diffLister = new GcsDiffFileLister();
private final GcsUtils gcsUtils = new GcsUtils(LocalStorageHelper.getOptions());
private final TestLogHandler logHandler = new TestLogHandler();

@RegisterExtension
public final AppEngineExtension appEngine =
AppEngineExtension.builder().withDatastoreAndCloudSql().build();

@BeforeEach
void beforeEach() throws Exception {
diffLister.gcsUtils = gcsUtils;
diffLister.executorProvider = MoreExecutors::newDirectExecutorService;
diffLister.scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
for (int i = 0; i < 5; i++) {
addGcsFile(i, i + 1);
}
JdkLoggerConfig.getConfig(GcsDiffFileLister.class).addHandler(logHandler);
}

private Iterable<DateTime> extractTimesFromDiffFiles(ImmutableList<BlobInfo> diffFiles) {
return transform(
diffFiles,
blobInfo -> DateTime.parse(blobInfo.getName().substring(DIFF_FILE_PREFIX.length())));
}

private Iterable<DateTime> listDiffFiles(DateTime fromTime, DateTime toTime) {
return extractTimesFromDiffFiles(diffLister.listDiffFiles(GCS_BUCKET, fromTime, toTime));
}

private void addGcsFile(int fileAge, int prevAge) throws IOException {
BlobInfo blobInfo =
BlobInfo.newBuilder(BlobId.of(GCS_BUCKET, DIFF_FILE_PREFIX + now.minusMinutes(fileAge)))
.setMetadata(
ImmutableMap.of(LOWER_BOUND_CHECKPOINT, now.minusMinutes(prevAge).toString()))
.build();
gcsUtils.createFromBytes(blobInfo, new byte[] {1, 2, 3});
}

private void assertLogContains(String message) {
for (LogRecord entry : logHandler.getStoredLogRecords()) {
if (entry.getMessage().contains(message)) {
return;
}
}
fail("No log entry contains " + message);
}

@Test
void testList_noFilesFound() {
DateTime fromTime = now.plusMillis(1);
assertThat(listDiffFiles(fromTime, null)).isEmpty();
}

@Test
void testList_patchesHoles() throws Exception {
GcsUtils mockGcsUtils = mock(GcsUtils.class);
diffLister.gcsUtils = spy(gcsUtils);
when(mockGcsUtils.listFolderObjects(anyString(), anyString()))
.thenReturn(ImmutableList.of(now.toString(), now.minusMinutes(4).toString()));
DateTime fromTime = now.minusMinutes(4).minusSeconds(1);
// Request all files with checkpoint > fromTime.
assertThat(listDiffFiles(fromTime, null))
.containsExactly(
now.minusMinutes(4),
now.minusMinutes(3),
now.minusMinutes(2),
now.minusMinutes(1),
now)
.inOrder();
}

@Test
void testList_failsOnFork() throws Exception {
// We currently have files for now-4m ... now, construct the following sequence:
// now-8m <- now-7m <- now-6m now-5m <- now-4m ... now
// ^___________________________|
addGcsFile(5, 8);
for (int i = 6; i < 9; ++i) {
addGcsFile(i, i + 1);
}

assertThrows(IllegalStateException.class, () -> listDiffFiles(now.minusMinutes(9), null));
assertLogContains(String.format(
"Found sequence from %s to %s", now.minusMinutes(9), now));
assertLogContains(String.format(
"Found sequence from %s to %s", now.minusMinutes(9), now.minusMinutes(6)));
}

@Test
void testList_boundaries() {
assertThat(listDiffFiles(now.minusMinutes(4), now))
.containsExactly(
now.minusMinutes(4),
now.minusMinutes(3),
now.minusMinutes(2),
now.minusMinutes(1),
now)
.inOrder();

}

@Test
void testList_failsOnGaps() throws Exception {
// We currently have files for now-4m ... now, construct the following sequence:
// now-8m <- now-7m <- now-6m {missing} <- now-4m ... now
for (int i = 6; i < 9; ++i) {
addGcsFile(i, i + 1);
}

assertThrows(IllegalStateException.class, () -> listDiffFiles(now.minusMinutes(9), null));
assertLogContains(String.format(
"Gap discovered in sequence terminating at %s, missing file: commit_diff_until_%s",
now, now.minusMinutes(5)));
assertLogContains(String.format(
"Found sequence from %s to %s", now.minusMinutes(9), now.minusMinutes(6)));
assertLogContains(String.format(
"Found sequence from %s to %s", now.minusMinutes(5), now));

// Verify that we can work around the gap.
DateTime fromTime = now.minusMinutes(4).minusSeconds(1);
assertThat(listDiffFiles(fromTime, null))
.containsExactly(
now.minusMinutes(4),
now.minusMinutes(3),
now.minusMinutes(2),
now.minusMinutes(1),
now)
.inOrder();
assertThat(listDiffFiles(
now.minusMinutes(8).minusSeconds(1), now.minusMinutes(6).plusSeconds(1)))
.containsExactly(
now.minusMinutes(8),
now.minusMinutes(7),
now.minusMinutes(6))
.inOrder();
}

@Test
void testList_toTimeSpecified() {
assertThat(
listDiffFiles(now.minusMinutes(4).minusSeconds(1), now.minusMinutes(2).plusSeconds(1)))
.containsExactly(now.minusMinutes(4), now.minusMinutes(3), now.minusMinutes(2))
.inOrder();
}

@Test
void testPrefix_lengthened() {
DateTime from = DateTime.parse("2021-05-11T06:48:00.070Z");
assertThat(getCommitLogDiffPrefix(from, null)).isEqualTo("commit_diff_until_");
assertThat(getCommitLogDiffPrefix(from, DateTime.parse("2021-07-01")))
.isEqualTo("commit_diff_until_2021-");
assertThat(getCommitLogDiffPrefix(from, DateTime.parse("2021-05-21")))
.isEqualTo("commit_diff_until_2021-05-");
assertThat(getCommitLogDiffPrefix(from, DateTime.parse("2021-05-11T09:48:00.070Z")))
.isEqualTo("commit_diff_until_2021-05-11T");
assertThat(getCommitLogDiffPrefix(from, DateTime.parse("2021-05-11T06:59:00.070Z")))
.isEqualTo("commit_diff_until_2021-05-11T06:");
}
}
@@ -1,346 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.backup;

import static com.google.common.collect.Iterables.transform;
import static com.google.common.collect.Maps.toMap;
import static com.google.common.truth.Truth.assertThat;
import static google.registry.backup.BackupUtils.GcsMetadataKeys.LOWER_BOUND_CHECKPOINT;
import static google.registry.backup.BackupUtils.serializeEntity;
import static google.registry.backup.ExportCommitLogDiffAction.DIFF_FILE_PREFIX;
import static google.registry.model.ofy.CommitLogBucket.getBucketIds;
import static google.registry.model.ofy.CommitLogBucket.getBucketKey;
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
import static org.joda.time.DateTimeZone.UTC;

import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import com.google.common.primitives.Longs;
import com.google.common.util.concurrent.MoreExecutors;
import com.googlecode.objectify.Key;
import google.registry.gcs.GcsUtils;
import google.registry.model.ImmutableObject;
import google.registry.model.domain.DomainBase;
import google.registry.model.ofy.CommitLogBucket;
import google.registry.model.ofy.CommitLogCheckpoint;
import google.registry.model.ofy.CommitLogCheckpointRoot;
import google.registry.model.ofy.CommitLogManifest;
import google.registry.model.ofy.CommitLogMutation;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.FakeClock;
import google.registry.testing.FakeSleeper;
import google.registry.testing.TestObject;
import google.registry.util.Retrier;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.Executors;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link RestoreCommitLogsAction}. */
public class RestoreCommitLogsActionTest {

static final String GCS_BUCKET = "gcs bucket";

private final DateTime now = DateTime.now(UTC);
private final RestoreCommitLogsAction action = new RestoreCommitLogsAction();
private final GcsUtils gcsUtils = new GcsUtils(LocalStorageHelper.getOptions());

@RegisterExtension
public final AppEngineExtension appEngine =
AppEngineExtension.builder()
.withDatastoreAndCloudSql()
.withOfyTestEntities(TestObject.class)
.build();

@BeforeEach
void beforeEach() {
action.gcsUtils = gcsUtils;
action.dryRun = false;
action.datastoreService = DatastoreServiceFactory.getDatastoreService();
action.fromTime = now.minusMillis(1);
action.retrier = new Retrier(new FakeSleeper(new FakeClock()), 1);
action.defaultGcsBucket = GCS_BUCKET;
action.gcsBucketOverride = Optional.empty();
action.diffLister = new GcsDiffFileLister();
action.diffLister.gcsUtils = gcsUtils;
action.diffLister.executorProvider = MoreExecutors::newDirectExecutorService;
action.diffLister.scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
}

@Test
void testRestore_multipleDiffFiles() throws Exception {
auditedOfy()
.saveWithoutBackup()
.entities(TestObject.create("previous to keep"), TestObject.create("previous to delete"))
.now();
// Create 3 transactions, across two diff files.
// Before: {"previous to keep", "previous to delete"}
// 1a: Add {"a", "b"}, Delete {"previous to delete"}
// 1b: Add {"c", "d"}, Delete {"a"}
// 2: Add {"e", "f"}, Delete {"c"}
// After: {"previous to keep", "b", "d", "e", "f"}
Key<CommitLogManifest> manifest1aKey =
CommitLogManifest.createKey(getBucketKey(1), now.minusMinutes(3));
Key<CommitLogManifest> manifest1bKey =
CommitLogManifest.createKey(getBucketKey(2), now.minusMinutes(2));
Key<CommitLogManifest> manifest2Key =
CommitLogManifest.createKey(getBucketKey(1), now.minusMinutes(1));
saveDiffFileNotToRestore(gcsUtils, now.minusMinutes(2));
Iterable<ImmutableObject> file1CommitLogs =
saveDiffFile(
gcsUtils,
createCheckpoint(now.minusMinutes(1)),
CommitLogManifest.create(
getBucketKey(1),
now.minusMinutes(3),
ImmutableSet.of(Key.create(TestObject.create("previous to delete")))),
CommitLogMutation.create(manifest1aKey, TestObject.create("a")),
CommitLogMutation.create(manifest1aKey, TestObject.create("b")),
CommitLogManifest.create(
getBucketKey(2),
now.minusMinutes(2),
ImmutableSet.of(Key.create(TestObject.create("a")))),
CommitLogMutation.create(manifest1bKey, TestObject.create("c")),
CommitLogMutation.create(manifest1bKey, TestObject.create("d")));
Iterable<ImmutableObject> file2CommitLogs =
saveDiffFile(
gcsUtils,
createCheckpoint(now),
CommitLogManifest.create(
getBucketKey(1),
now.minusMinutes(1),
ImmutableSet.of(Key.create(TestObject.create("c")))),
CommitLogMutation.create(manifest2Key, TestObject.create("e")),
CommitLogMutation.create(manifest2Key, TestObject.create("f")));
action.fromTime = now.minusMinutes(1).minusMillis(1);
action.run();
auditedOfy().clearSessionCache();
assertExpectedIds("previous to keep", "b", "d", "e", "f");
assertInDatastore(file1CommitLogs);
assertInDatastore(file2CommitLogs);
assertInDatastore(CommitLogCheckpointRoot.create(now));
assertCommitLogBuckets(ImmutableMap.of(1, now.minusMinutes(1), 2, now.minusMinutes(2)));
}

@Test
void testRestore_noManifests() throws Exception {
auditedOfy().saveWithoutBackup().entity(TestObject.create("previous to keep")).now();
saveDiffFileNotToRestore(gcsUtils, now.minusMinutes(1));
Iterable<ImmutableObject> commitLogs = saveDiffFile(gcsUtils, createCheckpoint(now));
action.run();
auditedOfy().clearSessionCache();
assertExpectedIds("previous to keep");
assertInDatastore(commitLogs);
assertInDatastore(CommitLogCheckpointRoot.create(now));
assertCommitLogBuckets(ImmutableMap.of());
}

@Test
void testRestore_manifestWithNoDeletions() throws Exception {
auditedOfy().saveWithoutBackup().entity(TestObject.create("previous to keep")).now();
Key<CommitLogBucket> bucketKey = getBucketKey(1);
Key<CommitLogManifest> manifestKey = CommitLogManifest.createKey(bucketKey, now);
saveDiffFileNotToRestore(gcsUtils, now.minusMinutes(1));
Iterable<ImmutableObject> commitLogs =
saveDiffFile(
gcsUtils,
createCheckpoint(now),
CommitLogManifest.create(bucketKey, now, null),
CommitLogMutation.create(manifestKey, TestObject.create("a")),
CommitLogMutation.create(manifestKey, TestObject.create("b")));
action.run();
auditedOfy().clearSessionCache();
assertExpectedIds("previous to keep", "a", "b");
assertInDatastore(commitLogs);
assertInDatastore(CommitLogCheckpointRoot.create(now));
assertCommitLogBuckets(ImmutableMap.of(1, now));
}
@Test
void testRestore_manifestWithNoMutations() throws Exception {
auditedOfy()
.saveWithoutBackup()
.entities(TestObject.create("previous to keep"), TestObject.create("previous to delete"))
.now();
saveDiffFileNotToRestore(gcsUtils, now.minusMinutes(1));
Iterable<ImmutableObject> commitLogs =
saveDiffFile(
gcsUtils,
createCheckpoint(now),
CommitLogManifest.create(
getBucketKey(1),
now,
ImmutableSet.of(Key.create(TestObject.create("previous to delete")))));
action.run();
auditedOfy().clearSessionCache();
assertExpectedIds("previous to keep");
assertInDatastore(commitLogs);
assertInDatastore(CommitLogCheckpointRoot.create(now));
assertCommitLogBuckets(ImmutableMap.of(1, now));
}

// This is a pathological case that shouldn't be possible, but we should be robust to it.
@Test
void testRestore_manifestWithNoMutationsOrDeletions() throws Exception {
auditedOfy().saveWithoutBackup().entities(TestObject.create("previous to keep")).now();
saveDiffFileNotToRestore(gcsUtils, now.minusMinutes(1));
Iterable<ImmutableObject> commitLogs =
saveDiffFile(
gcsUtils, createCheckpoint(now), CommitLogManifest.create(getBucketKey(1), now, null));
action.run();
auditedOfy().clearSessionCache();
assertExpectedIds("previous to keep");
assertInDatastore(commitLogs);
assertInDatastore(CommitLogCheckpointRoot.create(now));
assertCommitLogBuckets(ImmutableMap.of(1, now));
}

@Test
void testRestore_mutateExistingEntity() throws Exception {
auditedOfy().saveWithoutBackup().entity(TestObject.create("existing", "a")).now();
Key<CommitLogManifest> manifestKey = CommitLogManifest.createKey(getBucketKey(1), now);
saveDiffFileNotToRestore(gcsUtils, now.minusMinutes(1));
Iterable<ImmutableObject> commitLogs =
saveDiffFile(
gcsUtils,
createCheckpoint(now),
CommitLogManifest.create(getBucketKey(1), now, null),
CommitLogMutation.create(manifestKey, TestObject.create("existing", "b")));
action.run();
auditedOfy().clearSessionCache();
assertThat(auditedOfy().load().entity(TestObject.create("existing")).now().getField())
.isEqualTo("b");
assertInDatastore(commitLogs);
assertInDatastore(CommitLogCheckpointRoot.create(now));
assertCommitLogBuckets(ImmutableMap.of(1, now));
}

// This should be harmless; deletes are idempotent.
@Test
void testRestore_deleteMissingEntity() throws Exception {
auditedOfy().saveWithoutBackup().entity(TestObject.create("previous to keep", "a")).now();
saveDiffFileNotToRestore(gcsUtils, now.minusMinutes(1));
Iterable<ImmutableObject> commitLogs =
saveDiffFile(
gcsUtils,
createCheckpoint(now),
CommitLogManifest.create(
getBucketKey(1),
now,
ImmutableSet.of(Key.create(TestObject.create("previous to delete")))));
action.run();
auditedOfy().clearSessionCache();
assertExpectedIds("previous to keep");
assertInDatastore(commitLogs);
assertCommitLogBuckets(ImmutableMap.of(1, now));
assertInDatastore(CommitLogCheckpointRoot.create(now));
}

@Test
void testRestore_fromOtherProject() throws IOException {
// Input resource is a standard commit log file whose entities have "AppId_1" as appId. Among the
// entities are CommitLogMutations that have an embedded DomainBase and a ContactResource, both
// having "AppId_1" as appId. This test verifies that the embedded entities are properly
// imported, in particular, the domain's 'registrant' key can be used by Objectify to load the
// contact.
saveDiffFile(
gcsUtils,
Resources.toByteArray(Resources.getResource("google/registry/backup/commitlog.data")),
now);
action.run();
auditedOfy().clearSessionCache();
List<DomainBase> domainBases = auditedOfy().load().type(DomainBase.class).list();
assertThat(domainBases).hasSize(1);
DomainBase domainBase = domainBases.get(0);
// If the registrant is found, then the key instance in domainBase is fixed.
assertThat(auditedOfy().load().key(domainBase.getRegistrant().getOfyKey()).now()).isNotNull();
}

static CommitLogCheckpoint createCheckpoint(DateTime now) {
return CommitLogCheckpoint.create(now, toMap(getBucketIds(), x -> now));
}

static void saveDiffFile(GcsUtils gcsUtils, byte[] rawBytes, DateTime timestamp)
throws IOException {
BlobInfo blobInfo =
BlobInfo.newBuilder(BlobId.of(GCS_BUCKET, DIFF_FILE_PREFIX + timestamp))
.setMetadata(
ImmutableMap.of(LOWER_BOUND_CHECKPOINT, timestamp.minusMinutes(1).toString()))
.build();
gcsUtils.createFromBytes(blobInfo, rawBytes);
}

static Iterable<ImmutableObject> saveDiffFile(
GcsUtils gcsUtils, CommitLogCheckpoint checkpoint, ImmutableObject... entities)
throws IOException {
DateTime now = checkpoint.getCheckpointTime();
List<ImmutableObject> allEntities = Lists.asList(checkpoint, entities);
ByteArrayOutputStream output = new ByteArrayOutputStream();
for (ImmutableObject entity : allEntities) {
serializeEntity(entity, output);
}
saveDiffFile(gcsUtils, output.toByteArray(), now);
return allEntities;
}

static void saveDiffFileNotToRestore(GcsUtils gcsUtils, DateTime now) throws Exception {
saveDiffFile(
gcsUtils,
createCheckpoint(now),
CommitLogManifest.create(getBucketKey(1), now, null),
CommitLogMutation.create(
CommitLogManifest.createKey(getBucketKey(1), now),
TestObject.create("should not be restored")));
}

private void assertExpectedIds(String... ids) {
assertThat(transform(auditedOfy().load().type(TestObject.class), TestObject::getId))
.containsExactly((Object[]) ids);
}

private void assertInDatastore(ImmutableObject entity) {
assertThat(auditedOfy().load().entity(entity).now()).isEqualTo(entity);
}

private void assertInDatastore(Iterable<? extends ImmutableObject> entities) {
assertThat(auditedOfy().load().entities(entities).values()).containsExactlyElementsIn(entities);
}

private void assertCommitLogBuckets(Map<Integer, DateTime> bucketIdsAndTimestamps) {
Map<Long, CommitLogBucket> buckets =
auditedOfy()
.load()
.type(CommitLogBucket.class)
.ids(Longs.asList(Longs.toArray(CommitLogBucket.getBucketIds())));
assertThat(buckets).hasSize(bucketIdsAndTimestamps.size());
for (Entry<Integer, DateTime> bucketIdAndTimestamp : bucketIdsAndTimestamps.entrySet()) {
assertThat(buckets.get((long) bucketIdAndTimestamp.getKey()).getLastWrittenTime())
.isEqualTo(bucketIdAndTimestamp.getValue());
}
}
}
@@ -38,6 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
import com.google.common.collect.ImmutableSet;
import com.googlecode.objectify.Key;
import google.registry.config.RegistryEnvironment;
import google.registry.dns.DnsQueue;
import google.registry.model.ImmutableObject;
import google.registry.model.billing.BillingEvent;
import google.registry.model.billing.BillingEvent.Reason;
@@ -49,27 +50,39 @@ import google.registry.model.poll.PollMessage;
import google.registry.model.reporting.HistoryEntry;
import google.registry.model.tld.Registry;
import google.registry.model.tld.Registry.TldType;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.DualDatabaseTest;
import google.registry.testing.FakeClock;
import google.registry.testing.FakeResponse;
import google.registry.testing.SystemPropertyExtension;
import google.registry.testing.TestOfyAndSql;
import google.registry.testing.mapreduce.MapreduceTestCase;
import java.util.Optional;
import java.util.Set;
import org.joda.money.Money;
import org.joda.time.DateTime;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link DeleteProberDataAction}. */
@DualDatabaseTest
class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataAction> {
class DeleteProberDataActionTest {

private static final DateTime DELETION_TIME = DateTime.parse("2010-01-01T00:00:00.000Z");

@RegisterExtension
public final AppEngineExtension appEngine =
AppEngineExtension.builder()
.withDatastoreAndCloudSql()
.withLocalModules()
.withTaskQueue()
.build();

@RegisterExtension
final SystemPropertyExtension systemPropertyExtension = new SystemPropertyExtension();

private DeleteProberDataAction action;

@BeforeEach
void beforeEach() {
// Entities in these two should not be touched.
@@ -94,17 +107,17 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio

private void resetAction() {
action = new DeleteProberDataAction();
action.mrRunner = makeDefaultRunner();
action.dnsQueue = DnsQueue.createForTesting(new FakeClock());
action.response = new FakeResponse();
action.isDryRun = false;
action.tlds = ImmutableSet.of();
action.registryAdminRegistrarId = "TheRegistrar";
RegistryEnvironment.SANDBOX.setup(systemPropertyExtension);
// RegistryEnvironment.SANDBOX.setup(systemPropertyExtension);
}

private void runMapreduce() throws Exception {
action.run();
executeTasksUntilEmpty("mapreduce");
@AfterEach
void afterEach() {
RegistryEnvironment.UNITTEST.setup(systemPropertyExtension);
}

@TestOfyAndSql
@@ -114,7 +127,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
Set<ImmutableObject> notTestEntities = persistLotsOfDomains("not-test.test");
Set<ImmutableObject> ibEntities = persistLotsOfDomains("ib-any.test");
Set<ImmutableObject> oaEntities = persistLotsOfDomains("oa-canary.test");
runMapreduce();
action.run();
assertAllExist(tldEntities);
assertAllExist(exampleEntities);
assertAllExist(notTestEntities);
@@ -130,7 +143,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
Set<ImmutableObject> ibEntities = persistLotsOfDomains("ib-any.test");
Set<ImmutableObject> oaEntities = persistLotsOfDomains("oa-canary.test");
action.tlds = ImmutableSet.of("example", "ib-any.test");
runMapreduce();
action.run();
assertAllExist(tldEntities);
assertAllExist(notTestEntities);
assertAllExist(oaEntities);
@@ -141,8 +154,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
@TestOfyAndSql
void testFail_givenNonTestTld() {
action.tlds = ImmutableSet.of("not-test.test");
IllegalArgumentException thrown =
assertThrows(IllegalArgumentException.class, this::runMapreduce);
IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, action::run);
assertThat(thrown)
.hasMessageThat()
.contains("If tlds are given, they must all exist and be TEST tlds");
@@ -151,8 +163,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
@TestOfyAndSql
void testFail_givenNonExistentTld() {
action.tlds = ImmutableSet.of("non-existent.test");
IllegalArgumentException thrown =
assertThrows(IllegalArgumentException.class, this::runMapreduce);
IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, action::run);
assertThat(thrown)
.hasMessageThat()
.contains("If tlds are given, they must all exist and be TEST tlds");
@@ -162,8 +173,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
void testFail_givenNonDotTestTldOnProd() {
action.tlds = ImmutableSet.of("example");
RegistryEnvironment.PRODUCTION.setup(systemPropertyExtension);
IllegalArgumentException thrown =
assertThrows(IllegalArgumentException.class, this::runMapreduce);
IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, action::run);
assertThat(thrown)
.hasMessageThat()
.contains("On production, can only work on TLDs that end with .test");
@@ -172,15 +182,10 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
@TestOfyAndSql
void testSuccess_doesntDeleteNicDomainForProbers() throws Exception {
DomainBase nic = persistActiveDomain("nic.ib-any.test");
ForeignKeyIndex<DomainBase> fkiNic =
ForeignKeyIndex.load(DomainBase.class, "nic.ib-any.test", START_OF_TIME);
Set<ImmutableObject> ibEntities = persistLotsOfDomains("ib-any.test");
runMapreduce();
action.run();
assertAllAbsent(ibEntities);
assertAllExist(ImmutableSet.of(nic));
if (tm().isOfy()) {
assertAllExist(ImmutableSet.of(fkiNic));
}
}

@TestOfyAndSql
@@ -188,7 +193,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
Set<ImmutableObject> tldEntities = persistLotsOfDomains("tld");
Set<ImmutableObject> oaEntities = persistLotsOfDomains("oa-canary.test");
action.isDryRun = true;
runMapreduce();
action.run();
assertAllExist(tldEntities);
assertAllExist(oaEntities);
}
@@ -201,7 +206,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
.asBuilder()
.setCreationTimeForTest(DateTime.now(UTC).minusYears(1))
.build());
runMapreduce();
action.run();
DateTime timeAfterDeletion = DateTime.now(UTC);
assertThat(loadByForeignKey(DomainBase.class, "blah.ib-any.test", timeAfterDeletion)).isEmpty();
assertThat(loadByEntity(domain).getDeletionTime()).isLessThan(timeAfterDeletion);
@@ -215,10 +220,10 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
.asBuilder()
.setCreationTimeForTest(DateTime.now(UTC).minusYears(1))
.build());
runMapreduce();
action.run();
DateTime timeAfterDeletion = DateTime.now(UTC);
resetAction();
runMapreduce();
action.run();
assertThat(loadByForeignKey(DomainBase.class, "blah.ib-any.test", timeAfterDeletion))
.isEmpty();
assertThat(loadByEntity(domain).getDeletionTime()).isLessThan(timeAfterDeletion);
@@ -232,7 +237,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
.asBuilder()
.setCreationTimeForTest(DateTime.now(UTC).minusSeconds(1))
.build());
runMapreduce();
action.run();
Optional<DomainBase> domain =
loadByForeignKey(DomainBase.class, "blah.ib-any.test", DateTime.now(UTC));
assertThat(domain).isPresent();
@@ -248,7 +253,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
.setCreationTimeForTest(DateTime.now(UTC).minusYears(1))
.build());
action.isDryRun = true;
runMapreduce();
action.run();
assertThat(loadByEntity(domain).getDeletionTime()).isEqualTo(END_OF_TIME);
}

@@ -264,7 +269,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
.setSubordinateHosts(ImmutableSet.of("ns1.blah.ib-any.test"))
.build(),
DateTime.now(UTC).minusYears(1));
runMapreduce();
action.run();

assertAllExist(ImmutableSet.of(domainWithSubord));
assertAllAbsent(ImmutableSet.of(nakedDomain));
@@ -278,7 +283,7 @@ class DeleteProberDataActionTest extends MapreduceTestCase<DeleteProberDataActio
.setCreationTimeForTest(DateTime.now(UTC).minusYears(1))
.build());
action.registryAdminRegistrarId = null;
IllegalStateException thrown = assertThrows(IllegalStateException.class, this::runMapreduce);
IllegalStateException thrown = assertThrows(IllegalStateException.class, action::run);
assertThat(thrown).hasMessageThat().contains("Registry admin client ID must be configured");
}

@@ -17,12 +17,10 @@ package google.registry.batch;
import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.common.Cursor.CursorType.RECURRING_BILLING;
import static google.registry.model.domain.Period.Unit.YEARS;
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
import static google.registry.model.reporting.HistoryEntry.Type.DOMAIN_AUTORENEW;
import static google.registry.model.reporting.HistoryEntry.Type.DOMAIN_CREATE;
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
import static google.registry.persistence.transaction.TransactionManagerUtil.transactIfJpaTm;
import static google.registry.testing.DatabaseHelper.assertBillingEvents;
import static google.registry.testing.DatabaseHelper.assertBillingEventsForResource;
import static google.registry.testing.DatabaseHelper.createTld;
import static google.registry.testing.DatabaseHelper.getHistoryEntriesOfType;
@@ -52,27 +50,35 @@ import google.registry.model.reporting.DomainTransactionRecord;
import google.registry.model.reporting.DomainTransactionRecord.TransactionReportField;
import google.registry.model.reporting.HistoryEntry;
import google.registry.model.tld.Registry;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.DualDatabaseTest;
import google.registry.testing.FakeClock;
import google.registry.testing.FakeResponse;
import google.registry.testing.TestOfyAndSql;
import google.registry.testing.TestOfyOnly;
import google.registry.testing.mapreduce.MapreduceTestCase;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import org.joda.money.Money;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link ExpandRecurringBillingEventsAction}. */
@DualDatabaseTest
public class ExpandRecurringBillingEventsActionTest
extends MapreduceTestCase<ExpandRecurringBillingEventsAction> {
public class ExpandRecurringBillingEventsActionTest {

@RegisterExtension
public final AppEngineExtension appEngine =
AppEngineExtension.builder()
.withDatastoreAndCloudSql()
.withLocalModules()
.withTaskQueue()
.build();

private DateTime currentTestTime = DateTime.parse("1999-01-05T00:00:00Z");
private final FakeClock clock = new FakeClock(currentTestTime);

private ExpandRecurringBillingEventsAction action;
private DomainBase domain;
private DomainHistory historyEntry;
private BillingEvent.Recurring recurring;
@@ -80,7 +86,6 @@ public class ExpandRecurringBillingEventsActionTest
@BeforeEach
void beforeEach() {
action = new ExpandRecurringBillingEventsAction();
action.mrRunner = makeDefaultRunner();
action.clock = clock;
action.cursorTimeParam = Optional.empty();
action.batchSize = 2;
@@ -121,12 +126,10 @@ public class ExpandRecurringBillingEventsActionTest
private void runAction() throws Exception {
action.response = new FakeResponse();
action.run();
// Need to save the current test time before running the mapreduce, which increments the clock.
// Need to save the current test time before running the action, which increments the clock.
// The execution time (e.g. transaction time) is captured when the action starts running so
// the passage of time afterward does not affect the timestamp stored in the billing events.
currentTestTime = clock.nowUtc();
executeTasksUntilEmpty("mapreduce", clock);
auditedOfy().clearSessionCache();
}

private void assertCursorAt(DateTime expectedCursorTime) {
@@ -423,7 +426,7 @@ public class ExpandRecurringBillingEventsActionTest
@TestOfyAndSql
void testSuccess_expandSingleEvent_withCursorPastExpected() throws Exception {
persistResource(recurring);
// Simulate a quick second run of the mapreduce (this should be a no-op).
// Simulate a quick second run of the action (this should be a no-op).
saveCursor(clock.nowUtc().minusSeconds(1));
runAction();
// No new history entries should be generated
@@ -801,24 +804,11 @@ public class ExpandRecurringBillingEventsActionTest

@TestOfyAndSql
void testFailure_cursorAtExecutionTime() {
// The clock advances one milli on runMapreduce.
// The clock advances one milli on run.
action.cursorTimeParam = Optional.of(clock.nowUtc().plusMillis(1));
IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, this::runAction);
assertThat(thrown)
.hasMessageThat()
.contains("Cursor time must be earlier than execution time.");
}

@TestOfyOnly
void testFailure_mapperException_doesNotMoveCursor() throws Exception {
saveCursor(START_OF_TIME); // Need a saved cursor to verify that it didn't move.
clock.advanceOneMilli();
// Set target to a TLD that doesn't exist.
recurring = persistResource(recurring.asBuilder().setTargetId("domain.junk").build());
runAction();
// No new history entries should be generated
assertThat(getHistoryEntriesOfType(domain, DOMAIN_AUTORENEW)).isEmpty();
assertBillingEvents(recurring); // only the bogus one in Datastore
assertCursorAt(START_OF_TIME); // Cursor doesn't move on a failure.
}
}

@@ -1,102 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.batch;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
import static google.registry.testing.DatabaseHelper.persistActiveContact;
import static google.registry.testing.DatabaseHelper.persistContactWithPendingTransfer;
import static org.joda.time.DateTimeZone.UTC;

import google.registry.model.annotations.DeleteAfterMigration;
import google.registry.model.contact.ContactResource;
import google.registry.model.transfer.TransferStatus;
import google.registry.testing.FakeResponse;
import google.registry.testing.TmOverrideExtension;
import google.registry.testing.mapreduce.MapreduceTestCase;
import org.joda.time.DateTime;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link ResaveAllEppResourcesAction}. */
// No longer needed in SQL. Subject to future removal.
@Deprecated
@DeleteAfterMigration
class ResaveAllEppResourcesActionTest extends MapreduceTestCase<ResaveAllEppResourcesAction> {

@RegisterExtension
@Order(Order.DEFAULT - 1)
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();

@BeforeEach
void beforeEach() {
action = new ResaveAllEppResourcesAction();
action.mrRunner = makeDefaultRunner();
action.response = new FakeResponse();
}

private void runMapreduce() throws Exception {
action.run();
executeTasksUntilEmpty("mapreduce");
}

@Test
void test_mapreduceSuccessfullyResavesEntity() throws Exception {
ContactResource contact = persistActiveContact("test123");
DateTime creationTime = contact.getUpdateTimestamp().getTimestamp();
assertThat(auditedOfy().load().entity(contact).now().getUpdateTimestamp().getTimestamp())
.isEqualTo(creationTime);
auditedOfy().clearSessionCache();
runMapreduce();
assertThat(auditedOfy().load().entity(contact).now().getUpdateTimestamp().getTimestamp())
.isGreaterThan(creationTime);
}

@Test
void test_fastMode_doesNotResaveEntityWithNoChanges() throws Exception {
ContactResource contact = persistActiveContact("test123");
DateTime creationTime = contact.getUpdateTimestamp().getTimestamp();
assertThat(auditedOfy().load().entity(contact).now().getUpdateTimestamp().getTimestamp())
.isEqualTo(creationTime);
auditedOfy().clearSessionCache();
action.isFast = true;
runMapreduce();
assertThat(auditedOfy().load().entity(contact).now().getUpdateTimestamp().getTimestamp())
.isEqualTo(creationTime);
}

@Test
void test_mapreduceResolvesPendingTransfer() throws Exception {
DateTime now = DateTime.now(UTC);
// Set up a contact with a transfer that implicitly completed five days ago.
ContactResource contact =
persistContactWithPendingTransfer(
persistActiveContact("meh789"),
now.minusDays(10),
now.minusDays(10),
now.minusDays(10));
assertThat(contact.getTransferData().getTransferStatus()).isEqualTo(TransferStatus.PENDING);
runMapreduce();

auditedOfy().clearSessionCache();
// The transfer should be effective after the contact is re-saved, as it should've been
// projected to the current time.
ContactResource resavedContact = auditedOfy().load().entity(contact).now();
assertThat(resavedContact.getTransferData().getTransferStatus())
.isEqualTo(TransferStatus.SERVER_APPROVED);
}
}
@@ -15,7 +15,7 @@
package google.registry.batch;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.batch.ResaveAllEppResourcesPipelineAction.PARAM_FAST;
import static google.registry.batch.BatchModule.PARAM_FAST;
import static google.registry.batch.ResaveAllEppResourcesPipelineAction.PIPELINE_NAME;
import static google.registry.beam.BeamUtils.createJobName;
import static javax.servlet.http.HttpServletResponse.SC_OK;
@@ -28,7 +28,6 @@ import google.registry.beam.BeamActionTestBase;
import google.registry.config.RegistryEnvironment;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.FakeClock;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

@@ -43,13 +42,7 @@ public class ResaveAllEppResourcesPipelineActionTest extends BeamActionTestBase

private ResaveAllEppResourcesPipelineAction createAction(boolean isFast) {
return new ResaveAllEppResourcesPipelineAction(
"test-project",
"test-region",
"staging-bucket",
Optional.of(isFast),
fakeClock,
response,
dataflow);
"test-project", "test-region", "staging-bucket", isFast, fakeClock, response, dataflow);
}

@Test

@@ -1,68 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.cron;

import static google.registry.cron.CommitLogFanoutAction.BUCKET_PARAM;

import com.google.common.base.Joiner;
import google.registry.model.ofy.CommitLogBucket;
import google.registry.testing.AppEngineExtension;
import google.registry.testing.CloudTasksHelper;
import google.registry.testing.CloudTasksHelper.TaskMatcher;
import google.registry.testing.FakeClock;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link CommitLogFanoutAction}. */
class CommitLogFanoutActionTest {

private static final String ENDPOINT = "/the/servlet";
private static final String QUEUE = "the-queue";
private final CloudTasksHelper cloudTasksHelper = new CloudTasksHelper(new FakeClock());

@RegisterExtension
final AppEngineExtension appEngineExtension =
AppEngineExtension.builder()
.withDatastoreAndCloudSql()
.withTaskQueue(
Joiner.on('\n')
.join(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
"<queue-entries>",
" <queue>",
" <name>the-queue</name>",
" <rate>1/s</rate>",
" </queue>",
"</queue-entries>"))
.build();

@Test
void testSuccess() {
CommitLogFanoutAction action = new CommitLogFanoutAction();
action.cloudTasksUtils = cloudTasksHelper.getTestCloudTasksUtils();
action.endpoint = ENDPOINT;
action.queue = QUEUE;
action.jitterSeconds = Optional.empty();
action.run();
List<TaskMatcher> matchers = new ArrayList<>();
for (int bucketId : CommitLogBucket.getBucketIds()) {
matchers.add(new TaskMatcher().url(ENDPOINT).param(BUCKET_PARAM, Integer.toString(bucketId)));
}
cloudTasksHelper.assertTasksEnqueued(QUEUE, matchers);
}
}
@@ -32,19 +32,16 @@ import com.google.cloud.storage.StorageException;
|
||||
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.net.MediaType;
|
||||
import google.registry.export.ExportDomainListsAction.ExportDomainListsReducer;
|
||||
import google.registry.gcs.GcsUtils;
|
||||
import google.registry.model.ofy.Ofy;
|
||||
import google.registry.model.tld.Registry;
|
||||
import google.registry.model.tld.Registry.TldType;
|
||||
import google.registry.storage.drive.DriveConnection;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.DualDatabaseTest;
|
||||
import google.registry.testing.FakeClock;
|
||||
import google.registry.testing.FakeResponse;
|
||||
import google.registry.testing.InjectExtension;
|
||||
import google.registry.testing.TestOfyAndSql;
|
||||
import google.registry.testing.TestOfyOnly;
|
||||
import google.registry.testing.mapreduce.MapreduceTestCase;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
@@ -53,14 +50,22 @@ import org.mockito.ArgumentCaptor;
|
||||
|
||||
/** Unit tests for {@link ExportDomainListsAction}. */
|
||||
@DualDatabaseTest
|
||||
class ExportDomainListsActionTest extends MapreduceTestCase<ExportDomainListsAction> {
|
||||
class ExportDomainListsActionTest {
|
||||
|
||||
private final GcsUtils gcsUtils = new GcsUtils(LocalStorageHelper.getOptions());
|
||||
private DriveConnection driveConnection = mock(DriveConnection.class);
|
||||
private ArgumentCaptor<byte[]> bytesExportedToDrive = ArgumentCaptor.forClass(byte[].class);
|
||||
private final FakeResponse response = new FakeResponse();
|
||||
private ExportDomainListsAction action;
|
||||
private final FakeClock clock = new FakeClock(DateTime.parse("2020-02-02T02:02:02Z"));
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder()
|
||||
.withDatastoreAndCloudSql()
|
||||
.withLocalModules()
|
||||
.withTaskQueue()
|
||||
.build();
|
||||
|
||||
@Order(Order.DEFAULT - 1)
|
||||
@RegisterExtension
|
||||
public final InjectExtension inject =
|
||||
@@ -73,22 +78,13 @@ class ExportDomainListsActionTest extends MapreduceTestCase<ExportDomainListsAct
|
||||
persistResource(Registry.get("tld").asBuilder().setDriveFolderId("brouhaha").build());
|
||||
persistResource(Registry.get("testtld").asBuilder().setTldType(TldType.TEST).build());
|
||||
|
||||
ExportDomainListsReducer.setDriveConnectionForTesting(() -> driveConnection);
|
||||
|
||||
action = new ExportDomainListsAction();
|
||||
action.mrRunner = makeDefaultRunner();
|
||||
action.response = response;
|
||||
action.gcsBucket = "outputbucket";
|
||||
action.gcsUtils = gcsUtils;
|
||||
action.clock = clock;
|
||||
action.driveConnection = driveConnection;
|
||||
}
|
||||
|
||||
private void runAction() throws Exception {
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce");
|
||||
}
|
||||
|
||||
private void verifyExportedToDrive(String folderId, String domains) throws Exception {
|
||||
verify(driveConnection)
|
||||
.createOrUpdateFile(
|
||||
@@ -99,21 +95,12 @@ class ExportDomainListsActionTest extends MapreduceTestCase<ExportDomainListsAct
    assertThat(new String(bytesExportedToDrive.getValue(), UTF_8)).isEqualTo(domains);
  }

  @TestOfyOnly
  void test_writesLinkToMapreduceConsoleToResponse() throws Exception {
    runAction();
    assertThat(response.getPayload())
        .startsWith(
            "Mapreduce console: https://backend-dot-projectid.appspot.com"
                + "/_ah/pipeline/status.html?root=");
  }

  @TestOfyAndSql
  void test_outputsOnlyActiveDomains() throws Exception {
    persistActiveDomain("onetwo.tld");
    persistActiveDomain("rudnitzky.tld");
    persistDeletedDomain("mortuary.tld", DateTime.parse("2001-03-14T10:11:12Z"));
    runAction();
    action.run();
    BlobId existingFile = BlobId.of("outputbucket", "tld.txt");
    String tlds = new String(gcsUtils.readBytesFrom(existingFile), UTF_8);
    // Check that it only contains the active domains, not the dead one.
@@ -127,7 +114,7 @@ class ExportDomainListsActionTest extends MapreduceTestCase<ExportDomainListsAct
    persistActiveDomain("onetwo.tld");
    persistActiveDomain("rudnitzky.tld");
    persistActiveDomain("wontgo.testtld");
    runAction();
    action.run();
    BlobId existingFile = BlobId.of("outputbucket", "tld.txt");
    String tlds = new String(gcsUtils.readBytesFrom(existingFile), UTF_8).trim();
    // Check that it only contains the domains on the real TLD, and not the test one.
@@ -154,7 +141,7 @@ class ExportDomainListsActionTest extends MapreduceTestCase<ExportDomainListsAct
    persistActiveDomain("santa.tldtwo");
    persistActiveDomain("buddy.tldtwo");
    persistActiveDomain("cupid.tldthree");
    runAction();
    action.run();
    BlobId firstTldFile = BlobId.of("outputbucket", "tld.txt");
    String tlds = new String(gcsUtils.readBytesFrom(firstTldFile), UTF_8).trim();
    assertThat(tlds).isEqualTo("dasher.tld\nprancer.tld");

@@ -80,7 +80,7 @@ public interface EppTestComponent {
          new DomainFlowTmchUtils(
              new TmchXmlSignature(new TmchCertificateAuthority(TmchCaMode.PILOT, clock)));
      instance.sleeper = new FakeSleeper(instance.clock);
      instance.dnsQueue = DnsQueue.create();
      instance.dnsQueue = DnsQueue.createForTesting(clock);
      instance.metricBuilder = EppMetric.builderForRequest(clock);
      instance.lockHandler = new FakeLockHandler(true);
      instance.cloudTasksHelper = cloudTasksHelper;

@@ -206,7 +206,7 @@ class HostUpdateFlowTest extends ResourceFlowTestCase<HostUpdateFlow, HostResour
            .build());
    HostResource renamedHost = doSuccessfulTest();
    assertThat(renamedHost.isSubordinate()).isTrue();
    // Task enqueued to change the NS record of the referencing domain via mapreduce.
    // Task enqueued to change the NS record of the referencing domain.
    assertTasksEnqueued(
        QUEUE_ASYNC_HOST_RENAME,
        new TaskMatcher()

@@ -1,375 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.mapreduce.inputs;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static com.google.common.truth.Truth.assertWithMessage;
|
||||
import static google.registry.mapreduce.inputs.EppResourceInputs.createChildEntityInput;
|
||||
import static google.registry.model.index.EppResourceIndexBucket.getBucketKey;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.newContactResource;
|
||||
import static google.registry.testing.DatabaseHelper.newDomainBase;
|
||||
import static google.registry.testing.DatabaseHelper.persistEppResourceInFirstBucket;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistSimpleResource;
|
||||
import static google.registry.util.DateTimeUtils.END_OF_TIME;
|
||||
import static org.joda.money.CurrencyUnit.USD;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.google.appengine.tools.mapreduce.InputReader;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.EppResource;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.billing.BillingEvent;
|
||||
import google.registry.model.billing.BillingEvent.Reason;
|
||||
import google.registry.model.contact.ContactHistory;
|
||||
import google.registry.model.contact.ContactResource;
|
||||
import google.registry.model.domain.DomainBase;
|
||||
import google.registry.model.domain.DomainHistory;
|
||||
import google.registry.model.index.EppResourceIndex;
|
||||
import google.registry.model.reporting.HistoryEntry;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.util.HashSet;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
import org.joda.money.Money;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Tests {@link ChildEntityInput} */
|
||||
class ChildEntityInputTest {
|
||||
|
||||
private static final DateTime now = DateTime.now(DateTimeZone.UTC);
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
private DomainBase domainA;
|
||||
private DomainBase domainB;
|
||||
private DomainHistory domainHistoryEntryA;
|
||||
private DomainHistory domainHistoryEntryB;
|
||||
private ContactHistory contactHistoryEntry;
|
||||
private BillingEvent.OneTime oneTimeA;
|
||||
private BillingEvent.OneTime oneTimeB;
|
||||
private BillingEvent.Recurring recurringA;
|
||||
private BillingEvent.Recurring recurringB;
|
||||
|
||||
private void setupResources() {
|
||||
createTld("tld");
|
||||
ContactResource contact = persistEppResourceInFirstBucket(newContactResource("contact1234"));
|
||||
domainA = persistEppResourceInFirstBucket(newDomainBase("a.tld", contact));
|
||||
domainHistoryEntryA =
|
||||
persistResource(
|
||||
new DomainHistory.Builder()
|
||||
.setDomain(domainA)
|
||||
.setType(HistoryEntry.Type.DOMAIN_CREATE)
|
||||
.setDomain(domainA)
|
||||
.setModificationTime(now)
|
||||
.setRegistrarId(domainA.getCreationRegistrarId())
|
||||
.build());
|
||||
contactHistoryEntry =
|
||||
persistResource(
|
||||
new ContactHistory.Builder()
|
||||
.setContact(contact)
|
||||
.setType(HistoryEntry.Type.CONTACT_CREATE)
|
||||
.setContact(contact)
|
||||
.setModificationTime(now)
|
||||
.setRegistrarId(contact.getCreationRegistrarId())
|
||||
.build());
|
||||
oneTimeA =
|
||||
persistResource(
|
||||
new BillingEvent.OneTime.Builder()
|
||||
.setParent(domainHistoryEntryA)
|
||||
.setReason(Reason.CREATE)
|
||||
.setFlags(ImmutableSet.of(BillingEvent.Flag.ANCHOR_TENANT))
|
||||
.setPeriodYears(2)
|
||||
.setCost(Money.of(USD, 1))
|
||||
.setEventTime(now)
|
||||
.setBillingTime(now.plusDays(5))
|
||||
.setRegistrarId("TheRegistrar")
|
||||
.setTargetId("a.tld")
|
||||
.build());
|
||||
recurringA =
|
||||
persistResource(
|
||||
new BillingEvent.Recurring.Builder()
|
||||
.setParent(domainHistoryEntryA)
|
||||
.setReason(Reason.RENEW)
|
||||
.setEventTime(now.plusYears(1))
|
||||
.setRecurrenceEndTime(END_OF_TIME)
|
||||
.setRegistrarId("TheRegistrar")
|
||||
.setTargetId("a.tld")
|
||||
.build());
|
||||
}
|
||||
|
||||
private void setupSecondDomainBases() {
|
||||
domainB = persistEppResourceInFirstBucket(newDomainBase("b.tld"));
|
||||
domainHistoryEntryB =
|
||||
persistResource(
|
||||
new DomainHistory.Builder()
|
||||
.setDomain(domainB)
|
||||
.setType(HistoryEntry.Type.DOMAIN_CREATE)
|
||||
.setDomain(domainB)
|
||||
.setModificationTime(now)
|
||||
.setRegistrarId(domainB.getCreationRegistrarId())
|
||||
.build());
|
||||
oneTimeB =
|
||||
persistResource(
|
||||
new BillingEvent.OneTime.Builder()
|
||||
.setParent(domainHistoryEntryA)
|
||||
.setReason(Reason.CREATE)
|
||||
.setFlags(ImmutableSet.of(BillingEvent.Flag.ANCHOR_TENANT))
|
||||
.setPeriodYears(2)
|
||||
.setCost(Money.of(USD, 1))
|
||||
.setEventTime(now)
|
||||
.setBillingTime(now.plusDays(5))
|
||||
.setRegistrarId("TheRegistrar")
|
||||
.setTargetId("a.tld")
|
||||
.build());
|
||||
recurringB =
|
||||
persistResource(
|
||||
new BillingEvent.Recurring.Builder()
|
||||
.setParent(domainHistoryEntryA)
|
||||
.setReason(Reason.RENEW)
|
||||
.setEventTime(now.plusYears(1))
|
||||
.setRecurrenceEndTime(END_OF_TIME)
|
||||
.setRegistrarId("TheRegistrar")
|
||||
.setTargetId("a.tld")
|
||||
.build());
|
||||
}
|
||||
|
||||
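  // Round-trips an object through Java serialization; used to simulate how the mapreduce
  // framework checkpoints its readers between slices.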
@SuppressWarnings("unchecked")
|
||||
private <T> T serializeAndDeserialize(T obj) throws Exception {
|
||||
try (ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
|
||||
ObjectOutputStream objectOut = new ObjectOutputStream(byteOut)) {
|
||||
objectOut.writeObject(obj);
|
||||
try (ByteArrayInputStream byteIn = new ByteArrayInputStream(byteOut.toByteArray());
|
||||
ObjectInputStream objectIn = new ObjectInputStream(byteIn)) {
|
||||
return (T) objectIn.readObject();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_multipleParentsAndChildren() throws Exception {
|
||||
setupResources();
|
||||
setupSecondDomainBases();
|
||||
Set<ImmutableObject> seen = new HashSet<>();
|
||||
InputReader<ImmutableObject> reader =
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.of(EppResource.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(
|
||||
HistoryEntry.class, BillingEvent.OneTime.class, BillingEvent.Recurring.class))
|
||||
.createReaders()
|
||||
.get(0);
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
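    // Seven child entities were persisted above, so the first seven reads succeed; the reader is
    // re-serialized before each read to mimic slice boundaries, and the eighth read must throw.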
for (int i = 0; i < 8; i++) {
|
||||
reader.endSlice();
|
||||
reader = serializeAndDeserialize(reader);
|
||||
reader.beginSlice();
|
||||
if (i == 7) {
|
||||
// This final readerCopy is needed for the assertThrows lambda.
|
||||
final InputReader<ImmutableObject> readerCopy = reader;
|
||||
assertThrows(NoSuchElementException.class, () -> seen.add(readerCopy.next()));
|
||||
} else {
|
||||
seen.add(reader.next());
|
||||
}
|
||||
}
|
||||
assertThat(seen)
|
||||
.containsExactly(
|
||||
domainHistoryEntryA.asHistoryEntry(),
|
||||
domainHistoryEntryB.asHistoryEntry(),
|
||||
contactHistoryEntry.asHistoryEntry(),
|
||||
oneTimeA,
|
||||
recurringA,
|
||||
oneTimeB,
|
||||
recurringB);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityInput_polymorphicBaseType() {
|
||||
createChildEntityInput(ImmutableSet.of(EppResource.class), ImmutableSet.of(BillingEvent.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_multipleChildTypes() throws Exception {
|
||||
setupResources();
|
||||
InputReader<ImmutableObject> reader =
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.of(EppResource.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(
|
||||
HistoryEntry.class, BillingEvent.OneTime.class, BillingEvent.Recurring.class))
|
||||
.createReaders()
|
||||
.get(0);
|
||||
assertThat(getAllFromReader(reader))
|
||||
.containsExactly(
|
||||
domainHistoryEntryA.asHistoryEntry(),
|
||||
contactHistoryEntry.asHistoryEntry(),
|
||||
oneTimeA,
|
||||
recurringA);
|
||||
}
|
||||
|
||||
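  // Drains the reader, collecting every element until it signals exhaustion.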
private static Set<ImmutableObject> getAllFromReader(InputReader<ImmutableObject> reader)
|
||||
throws Exception {
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
ImmutableSet.Builder<ImmutableObject> seen = new ImmutableSet.Builder<>();
|
||||
try {
|
||||
while (true) {
|
||||
seen.add(reader.next());
|
||||
}
|
||||
} catch (NoSuchElementException e) {
|
||||
// Swallow; this is expected.
|
||||
}
|
||||
return seen.build();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_filterParentTypes() throws Exception {
|
||||
setupResources();
|
||||
InputReader<ImmutableObject> reader =
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.<Class<? extends EppResource>>of(ContactResource.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(
|
||||
HistoryEntry.class, BillingEvent.OneTime.class, BillingEvent.Recurring.class))
|
||||
.createReaders()
|
||||
.get(0);
|
||||
assertThat(getAllFromReader(reader)).containsExactly(contactHistoryEntry.asHistoryEntry());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_polymorphicChildFiltering() throws Exception {
|
||||
setupResources();
|
||||
InputReader<ImmutableObject> reader =
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.of(EppResource.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(BillingEvent.OneTime.class))
|
||||
.createReaders()
|
||||
.get(0);
|
||||
assertThat(getAllFromReader(reader)).containsExactly(oneTimeA);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_polymorphicChildClass() throws Exception {
|
||||
setupResources();
|
||||
InputReader<ImmutableObject> reader =
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.of(EppResource.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(BillingEvent.class))
|
||||
.createReaders()
|
||||
.get(0);
|
||||
assertThat(getAllFromReader(reader)).containsExactly(oneTimeA, recurringA);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_noneReturned() throws Exception {
|
||||
createTld("tld");
|
||||
InputReader<ImmutableObject> reader =
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.<Class<? extends EppResource>>of(ContactResource.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(BillingEvent.OneTime.class))
|
||||
.createReaders()
|
||||
.get(0);
|
||||
assertThat(getAllFromReader(reader)).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_readerCountMatchesBucketCount() throws Exception {
|
||||
assertThat(
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.<Class<? extends EppResource>>of(DomainBase.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(BillingEvent.OneTime.class))
|
||||
.createReaders())
|
||||
.hasSize(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_oneReaderPerBucket() throws Exception {
|
||||
createTld("tld");
|
||||
Set<ImmutableObject> historyEntries = new HashSet<>();
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
DomainBase domain = persistSimpleResource(newDomainBase(i + ".tld"));
|
||||
historyEntries.add(
|
||||
persistResource(
|
||||
new DomainHistory.Builder()
|
||||
.setDomain(domain)
|
||||
.setType(HistoryEntry.Type.DOMAIN_CREATE)
|
||||
.setModificationTime(now)
|
||||
.setRegistrarId(domain.getCreationRegistrarId())
|
||||
.build())
|
||||
.asHistoryEntry());
|
||||
persistResource(EppResourceIndex.create(getBucketKey(i), Key.create(domain)));
|
||||
}
|
||||
Set<ImmutableObject> seen = new HashSet<>();
|
||||
for (InputReader<ImmutableObject> reader :
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.<Class<? extends EppResource>>of(DomainBase.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(HistoryEntry.class))
|
||||
.createReaders()) {
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
seen.add(reader.next());
|
||||
try {
|
||||
ImmutableObject o = reader.next();
|
||||
assertWithMessage("Unexpected element: %s", o).fail();
|
||||
} catch (NoSuchElementException expected) {
|
||||
}
|
||||
}
|
||||
assertThat(seen).containsExactlyElementsIn(historyEntries);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_childEntityReader_survivesAcrossSerialization() throws Exception {
|
||||
setupResources();
|
||||
Set<ImmutableObject> seen = new HashSet<>();
|
||||
InputReader<ImmutableObject> reader =
|
||||
EppResourceInputs.createChildEntityInput(
|
||||
ImmutableSet.of(EppResource.class),
|
||||
ImmutableSet.<Class<? extends ImmutableObject>>of(
|
||||
HistoryEntry.class, BillingEvent.OneTime.class, BillingEvent.Recurring.class))
|
||||
.createReaders()
|
||||
.get(0);
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
seen.add(reader.next());
|
||||
seen.add(reader.next());
|
||||
reader.endSlice();
|
||||
InputReader<ImmutableObject> deserializedReader = serializeAndDeserialize(reader);
|
||||
deserializedReader.beginSlice();
|
||||
seen.add(deserializedReader.next());
|
||||
seen.add(deserializedReader.next());
|
||||
assertThat(seen)
|
||||
.containsExactly(
|
||||
domainHistoryEntryA.asHistoryEntry(),
|
||||
contactHistoryEntry.asHistoryEntry(),
|
||||
oneTimeA,
|
||||
recurringA);
|
||||
assertThrows(NoSuchElementException.class, deserializedReader::next);
|
||||
}
|
||||
}
|
||||
@@ -1,138 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.mapreduce.inputs;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static com.google.common.truth.Truth.assertWithMessage;
|
||||
|
||||
import com.google.appengine.tools.mapreduce.Input;
|
||||
import com.google.appengine.tools.mapreduce.InputReader;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.ofy.CommitLogBucket;
|
||||
import google.registry.model.ofy.CommitLogManifest;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.DatabaseHelper;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link CommitLogManifestInput}. */
|
||||
final class CommitLogManifestInputTest {
|
||||
|
||||
private static final DateTime DATE_TIME_OLD = DateTime.parse("2015-12-19T12:00Z");
|
||||
private static final DateTime DATE_TIME_OLD2 = DateTime.parse("2016-12-19T11:59Z");
|
||||
|
||||
private static final DateTime DATE_TIME_THRESHOLD = DateTime.parse("2016-12-19T12:00Z");
|
||||
|
||||
private static final DateTime DATE_TIME_NEW = DateTime.parse("2016-12-19T12:01Z");
|
||||
private static final DateTime DATE_TIME_NEW2 = DateTime.parse("2017-12-19T12:00Z");
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
@Test
|
||||
void testInputOlderThan_allFound() throws Exception {
|
||||
Set<Key<CommitLogManifest>> created = new HashSet<>();
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
created.add(createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_OLD));
|
||||
}
|
||||
List<Key<CommitLogManifest>> seen = new ArrayList<>();
|
||||
Input<Key<CommitLogManifest>> input = new CommitLogManifestInput(DATE_TIME_THRESHOLD);
|
||||
for (InputReader<Key<CommitLogManifest>> reader : input.createReaders()) {
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
seen.add(reader.next());
|
||||
try {
|
||||
Key<CommitLogManifest> key = reader.next();
|
||||
assertWithMessage("Unexpected element: %s", key).fail();
|
||||
} catch (NoSuchElementException expected) {
|
||||
}
|
||||
}
|
||||
assertThat(seen).containsExactlyElementsIn(created);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInputOlderThan_skipsNew() throws Exception {
|
||||
Set<Key<CommitLogManifest>> old = new HashSet<>();
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_NEW);
|
||||
createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_NEW2);
|
||||
old.add(createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_OLD));
|
||||
old.add(createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_OLD2));
|
||||
}
|
||||
List<Key<CommitLogManifest>> seen = new ArrayList<>();
|
||||
Input<Key<CommitLogManifest>> input = new CommitLogManifestInput(DATE_TIME_THRESHOLD);
|
||||
for (InputReader<Key<CommitLogManifest>> reader : input.createReaders()) {
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
try {
|
||||
Key<CommitLogManifest> key = null;
|
||||
for (int i = 0; i < 10; i++) {
|
||||
key = reader.next();
|
||||
seen.add(key);
|
||||
}
|
||||
assertWithMessage("Unexpected element: %s", key).fail();
|
||||
} catch (NoSuchElementException expected) {
|
||||
}
|
||||
}
|
||||
assertThat(seen).containsExactlyElementsIn(old);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testInputAll() throws Exception {
|
||||
Set<Key<CommitLogManifest>> created = new HashSet<>();
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
created.add(createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_NEW));
|
||||
created.add(createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_NEW2));
|
||||
created.add(createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_OLD));
|
||||
created.add(createManifest(CommitLogBucket.getBucketKey(i), DATE_TIME_OLD2));
|
||||
}
|
||||
List<Key<CommitLogManifest>> seen = new ArrayList<>();
|
||||
Input<Key<CommitLogManifest>> input = new CommitLogManifestInput();
|
||||
for (InputReader<Key<CommitLogManifest>> reader : input.createReaders()) {
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
try {
|
||||
Key<CommitLogManifest> key = null;
|
||||
for (int i = 0; i < 10; i++) {
|
||||
key = reader.next();
|
||||
seen.add(key);
|
||||
}
|
||||
assertWithMessage("Unexpected element: %s", key).fail();
|
||||
} catch (NoSuchElementException expected) {
|
||||
}
|
||||
}
|
||||
assertThat(seen).containsExactlyElementsIn(created);
|
||||
}
|
||||
|
||||
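  // Persists a manifest (with no deleted-entity keys) under the given bucket at the given time
  // and returns its key.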
private static Key<CommitLogManifest> createManifest(
|
||||
Key<CommitLogBucket> parent, DateTime dateTime) {
|
||||
CommitLogManifest commitLogManifest = CommitLogManifest.create(parent, dateTime, null);
|
||||
DatabaseHelper.persistResource(commitLogManifest);
|
||||
return Key.create(commitLogManifest);
|
||||
}
|
||||
}
|
||||
@@ -1,258 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.mapreduce.inputs;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static com.google.common.truth.Truth.assertWithMessage;
|
||||
import static google.registry.mapreduce.inputs.EppResourceInputs.createEntityInput;
|
||||
import static google.registry.mapreduce.inputs.EppResourceInputs.createKeyInput;
|
||||
import static google.registry.model.index.EppResourceIndexBucket.getBucketKey;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.newContactResource;
|
||||
import static google.registry.testing.DatabaseHelper.newDomainBase;
|
||||
import static google.registry.testing.DatabaseHelper.newHostResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveContact;
|
||||
import static google.registry.testing.DatabaseHelper.persistEppResourceInFirstBucket;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistSimpleResource;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.google.appengine.tools.mapreduce.InputReader;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.EppResource;
|
||||
import google.registry.model.contact.ContactResource;
|
||||
import google.registry.model.domain.DomainBase;
|
||||
import google.registry.model.host.HostResource;
|
||||
import google.registry.model.index.EppResourceIndex;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
import java.util.HashSet;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Tests {@link EppResourceInputs} */
|
||||
class EppResourceInputsTest {
|
||||
|
||||
private static final double EPSILON = 0.0001;
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private <T> T serializeAndDeserialize(T obj) throws Exception {
|
||||
try (ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
|
||||
ObjectOutputStream objectOut = new ObjectOutputStream(byteOut)) {
|
||||
objectOut.writeObject(obj);
|
||||
try (ByteArrayInputStream byteIn = new ByteArrayInputStream(byteOut.toByteArray());
|
||||
ObjectInputStream objectIn = new ObjectInputStream(byteIn)) {
|
||||
return (T) objectIn.readObject();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_keyInputType_polymorphicBaseType() {
|
||||
createKeyInput(EppResource.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_keyInputType_noInheritanceBetweenTypes_eppResource() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> createKeyInput(EppResource.class, DomainBase.class));
|
||||
assertThat(thrown).hasMessageThat().contains("inheritance");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_entityInputType_noInheritanceBetweenTypes_eppResource() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> createEntityInput(EppResource.class, DomainBase.class));
|
||||
assertThat(thrown).hasMessageThat().contains("inheritance");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_entityInputType_noInheritanceBetweenTypes_subclasses() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> createEntityInput(EppResource.class, ContactResource.class));
|
||||
assertThat(thrown).hasMessageThat().contains("inheritance");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testReaderCountMatchesBucketCount() throws Exception {
|
||||
assertThat(createKeyInput(DomainBase.class).createReaders()).hasSize(3);
|
||||
assertThat(createEntityInput(DomainBase.class).createReaders()).hasSize(3);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKeyInput_oneReaderPerBucket() throws Exception {
|
||||
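    // Create one domain key in each of the three index buckets; each reader should then return
    // exactly one key.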
createTld("tld");
|
||||
Set<Key<DomainBase>> domains = new HashSet<>();
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
Key<DomainBase> key = Key.create(newDomainBase(i + ".tld"));
|
||||
domains.add(key);
|
||||
persistResource(EppResourceIndex.create(getBucketKey(i), key));
|
||||
}
|
||||
Set<Key<DomainBase>> seen = new HashSet<>();
|
||||
for (InputReader<Key<DomainBase>> reader : createKeyInput(DomainBase.class).createReaders()) {
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
seen.add(reader.next());
|
||||
try {
|
||||
Key<DomainBase> key = reader.next();
|
||||
assertWithMessage("Unexpected element: %s", key).fail();
|
||||
} catch (NoSuchElementException expected) {
|
||||
}
|
||||
}
|
||||
assertThat(seen).containsExactlyElementsIn(domains);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testEntityInput_oneReaderPerBucket() throws Exception {
|
||||
createTld("tld");
|
||||
Set<DomainBase> domains = new HashSet<>();
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
// Persist the domain as a simple resource so that it doesn't automatically get an ERI.
|
||||
DomainBase domain = persistSimpleResource(newDomainBase(i + ".tld"));
|
||||
domains.add(domain);
|
||||
persistResource(EppResourceIndex.create(getBucketKey(i), Key.create(domain)));
|
||||
}
|
||||
Set<DomainBase> seen = new HashSet<>();
|
||||
for (InputReader<DomainBase> reader : createEntityInput(DomainBase.class).createReaders()) {
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
seen.add(reader.next());
|
||||
try {
|
||||
DomainBase domain = reader.next();
|
||||
assertWithMessage("Unexpected element: %s", domain).fail();
|
||||
} catch (NoSuchElementException expected) {
|
||||
}
|
||||
}
|
||||
assertThat(seen).containsExactlyElementsIn(domains);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_keyReader_survivesAcrossSerialization() throws Exception {
|
||||
createTld("tld");
|
||||
DomainBase domainA = persistEppResourceInFirstBucket(newDomainBase("a.tld"));
|
||||
DomainBase domainB = persistEppResourceInFirstBucket(newDomainBase("b.tld"));
|
||||
// Should be ignored. We'll know if it isn't because the progress counts will be off.
|
||||
persistActiveContact("contact");
|
||||
Set<Key<DomainBase>> seen = new HashSet<>();
|
||||
InputReader<Key<DomainBase>> reader = createKeyInput(DomainBase.class).createReaders().get(0);
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0.5);
|
||||
reader.endSlice();
|
||||
reader = serializeAndDeserialize(reader);
|
||||
reader.beginSlice();
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0.5);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(1);
|
||||
assertThat(seen).containsExactly(Key.create(domainA), Key.create(domainB));
|
||||
assertThrows(NoSuchElementException.class, reader::next);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_entityReader_survivesAcrossSerialization() throws Exception {
|
||||
createTld("tld");
|
||||
DomainBase domainA = persistEppResourceInFirstBucket(newDomainBase("a.tld"));
|
||||
DomainBase domainB = persistEppResourceInFirstBucket(newDomainBase("b.tld"));
|
||||
// Should be ignored. We'll know if it isn't because the progress counts will be off.
|
||||
persistActiveContact("contact");
|
||||
Set<DomainBase> seen = new HashSet<>();
|
||||
InputReader<DomainBase> reader = createEntityInput(DomainBase.class).createReaders().get(0);
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0.5);
|
||||
reader.endSlice();
|
||||
InputReader<DomainBase> deserializedReader = serializeAndDeserialize(reader);
|
||||
deserializedReader.beginSlice();
|
||||
assertThat(deserializedReader.getProgress()).isWithin(EPSILON).of(0.5);
|
||||
seen.add(deserializedReader.next());
|
||||
assertThat(deserializedReader.getProgress()).isWithin(EPSILON).of(1);
|
||||
deserializedReader.endSlice();
|
||||
deserializedReader.endShard();
|
||||
assertThat(seen).containsExactly(domainA, domainB);
|
||||
assertThrows(NoSuchElementException.class, deserializedReader::next);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_entityReader_filtersOnMultipleTypes() throws Exception {
|
||||
createTld("tld");
|
||||
DomainBase domain = persistEppResourceInFirstBucket(newDomainBase("a.tld"));
|
||||
HostResource host = persistEppResourceInFirstBucket(newHostResource("ns1.example.com"));
|
||||
persistEppResourceInFirstBucket(newContactResource("contact"));
|
||||
Set<EppResource> seen = new HashSet<>();
|
||||
InputReader<EppResource> reader =
|
||||
EppResourceInputs.<EppResource>createEntityInput(DomainBase.class, HostResource.class)
|
||||
.createReaders()
|
||||
.get(0);
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0.5);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(1.0);
|
||||
assertThat(seen).containsExactly(domain, host);
|
||||
assertThrows(NoSuchElementException.class, reader::next);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_entityReader_noFilteringWhenUsingEppResource() throws Exception {
|
||||
createTld("tld");
|
||||
ContactResource contact = persistEppResourceInFirstBucket(newContactResource("contact"));
|
||||
// Specify the contact since persistActiveDomain() creates a hidden one.
|
||||
DomainBase domain1 = persistEppResourceInFirstBucket(newDomainBase("a.tld", contact));
|
||||
DomainBase domain2 = persistEppResourceInFirstBucket(newDomainBase("b.tld", contact));
|
||||
HostResource host = persistEppResourceInFirstBucket(newHostResource("ns1.example.com"));
|
||||
Set<EppResource> seen = new HashSet<>();
|
||||
InputReader<EppResource> reader = createEntityInput(EppResource.class).createReaders().get(0);
|
||||
reader.beginShard();
|
||||
reader.beginSlice();
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0.25);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0.5);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(0.75);
|
||||
seen.add(reader.next());
|
||||
assertThat(reader.getProgress()).isWithin(EPSILON).of(1.0);
|
||||
assertThat(seen).containsExactly(domain1, domain2, host, contact);
|
||||
assertThrows(NoSuchElementException.class, reader::next);
|
||||
}
|
||||
}
|
||||
@@ -18,9 +18,7 @@ import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.EppResourceUtils.loadAtPointInTime;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.newHostResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistNewRegistrars;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistResourceWithCommitLog;
|
||||
import static google.registry.util.DateTimeUtils.START_OF_TIME;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
|
||||
@@ -31,7 +29,6 @@ import google.registry.testing.DualDatabaseTest;
|
||||
import google.registry.testing.FakeClock;
|
||||
import google.registry.testing.InjectExtension;
|
||||
import google.registry.testing.TestOfyAndSql;
|
||||
import google.registry.testing.TestOfyOnly;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.Duration;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
@@ -80,107 +77,4 @@ class EppResourceUtilsTest {
|
||||
.build());
|
||||
assertThat(loadAtPointInTime(host, clock.nowUtc())).isEqualTo(host);
|
||||
}
|
||||
|
||||
@TestOfyOnly
|
||||
void testLoadAtPointInTime_usingIntactRevisionHistory_returnsMutationValue() {
|
||||
persistNewRegistrars("OLD", "NEW");
|
||||
clock.advanceOneMilli();
|
||||
// Save resource with a commit log that we can read in later as a revisions map value.
|
||||
HostResource oldHost =
|
||||
persistResourceWithCommitLog(
|
||||
newHostResource("ns1.cat.tld")
|
||||
.asBuilder()
|
||||
.setCreationTimeForTest(START_OF_TIME)
|
||||
.setPersistedCurrentSponsorRegistrarId("OLD")
|
||||
.build());
|
||||
// Advance a day so that the next created revision entry doesn't overwrite the existing one.
|
||||
clock.advanceBy(Duration.standardDays(1));
|
||||
// Overwrite the current host with one that has different data.
|
||||
HostResource currentHost =
|
||||
persistResource(oldHost.asBuilder().setPersistedCurrentSponsorRegistrarId("NEW").build());
|
||||
// Load at the point in time just before the latest update; the floor entry of the revisions
|
||||
// map should point to the manifest for the first save, so we should get the old host.
|
||||
assertThat(loadAtPointInTime(currentHost, clock.nowUtc().minusMillis(1))).isEqualTo(oldHost);
|
||||
}
|
||||
|
||||
@TestOfyOnly
|
||||
void testLoadAtPointInTime_brokenRevisionHistory_returnsResourceAsIs() {
|
||||
// Don't save a commit log since we want to test the handling of a broken revisions key.
|
||||
HostResource oldHost =
|
||||
persistResource(
|
||||
newHostResource("ns1.cat.tld")
|
||||
.asBuilder()
|
||||
.setCreationTimeForTest(START_OF_TIME)
|
||||
.setPersistedCurrentSponsorRegistrarId("OLD")
|
||||
.build());
|
||||
// Advance a day so that the next created revision entry doesn't overwrite the existing one.
|
||||
clock.advanceBy(Duration.standardDays(1));
|
||||
// Overwrite the existing resource to force revisions map use.
|
||||
HostResource host =
|
||||
persistResource(oldHost.asBuilder().setPersistedCurrentSponsorRegistrarId("NEW").build());
|
||||
// Load at the point in time just before the latest update; the old host is not recoverable
|
||||
// (revisions map link is broken, and guessing using the oldest revision map entry finds the
|
||||
// same broken link), so just returns the current host.
|
||||
assertThat(loadAtPointInTime(host, clock.nowUtc().minusMillis(1))).isEqualTo(host);
|
||||
}
|
||||
|
||||
@TestOfyOnly
|
||||
void testLoadAtPointInTime_fallback_returnsMutationValueForOldestRevision() {
|
||||
clock.advanceOneMilli();
|
||||
// Save a commit log that we can fall back to.
|
||||
HostResource oldHost =
|
||||
persistResourceWithCommitLog(
|
||||
newHostResource("ns1.cat.tld")
|
||||
.asBuilder()
|
||||
.setCreationTimeForTest(START_OF_TIME)
|
||||
.setPersistedCurrentSponsorRegistrarId("OLD")
|
||||
.build());
|
||||
// Advance a day so that the next created revision entry doesn't overwrite the existing one.
|
||||
clock.advanceBy(Duration.standardDays(1));
|
||||
// Overwrite the current host with one that has different data.
|
||||
HostResource currentHost =
|
||||
persistResource(oldHost.asBuilder().setPersistedCurrentSponsorRegistrarId("NEW").build());
|
||||
// Load at the point in time before the first update; there will be no floor entry for the
|
||||
// revisions map, so give up and return the oldest revision entry's mutation value (the old host
|
||||
// data).
|
||||
assertThat(loadAtPointInTime(currentHost, clock.nowUtc().minusDays(2))).isEqualTo(oldHost);
|
||||
}
|
||||
|
||||
@TestOfyOnly
|
||||
void testLoadAtPointInTime_ultimateFallback_onlyOneRevision_returnsCurrentResource() {
|
||||
clock.advanceOneMilli();
|
||||
// Don't save a commit log; we want to test that we load from the current resource.
|
||||
HostResource host =
|
||||
persistResource(
|
||||
newHostResource("ns1.cat.tld")
|
||||
.asBuilder()
|
||||
.setCreationTimeForTest(START_OF_TIME)
|
||||
.setPersistedCurrentSponsorRegistrarId("OLD")
|
||||
.build());
|
||||
// Load at the point in time before the first save; there will be no floor entry for the
|
||||
// revisions map. Since the oldest revision entry is the only (i.e. current) revision, return
|
||||
// the resource.
|
||||
assertThat(loadAtPointInTime(host, clock.nowUtc().minusMillis(1))).isEqualTo(host);
|
||||
}
|
||||
|
||||
@TestOfyOnly
|
||||
void testLoadAtPointInTime_moreThanThirtyDaysInPast_historyIsPurged() {
|
||||
clock.advanceOneMilli();
|
||||
HostResource host =
|
||||
persistResourceWithCommitLog(newHostResource("ns1.example.net"));
|
||||
assertThat(host.getRevisions()).hasSize(1);
|
||||
clock.advanceBy(Duration.standardDays(31));
|
||||
host = persistResourceWithCommitLog(host);
|
||||
assertThat(host.getRevisions()).hasSize(2);
|
||||
clock.advanceBy(Duration.standardDays(31));
|
||||
host = persistResourceWithCommitLog(host);
|
||||
assertThat(host.getRevisions()).hasSize(2);
|
||||
// Even though there is no revision, make a best effort guess to use the oldest revision.
|
||||
    assertThat(
            loadAtPointInTime(host, clock.nowUtc().minus(Duration.standardDays(32)))
                .getUpdateTimestamp()
                .getTimestamp())
        .isEqualTo(host.getRevisions().firstKey());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,11 +31,6 @@ import google.registry.model.index.EppResourceIndexBucket;
|
||||
import google.registry.model.index.ForeignKeyIndex.ForeignKeyContactIndex;
|
||||
import google.registry.model.index.ForeignKeyIndex.ForeignKeyDomainIndex;
|
||||
import google.registry.model.index.ForeignKeyIndex.ForeignKeyHostIndex;
|
||||
import google.registry.model.ofy.CommitLogBucket;
|
||||
import google.registry.model.ofy.CommitLogCheckpoint;
|
||||
import google.registry.model.ofy.CommitLogCheckpointRoot;
|
||||
import google.registry.model.ofy.CommitLogManifest;
|
||||
import google.registry.model.ofy.CommitLogMutation;
|
||||
import google.registry.model.poll.PollMessage;
|
||||
import google.registry.model.rde.RdeRevision;
|
||||
import google.registry.model.registrar.Registrar;
|
||||
@@ -65,9 +60,6 @@ public class ClassPathManagerTest {
|
||||
assertThat(ClassPathManager.getClass("ForeignKeyContactIndex"))
|
||||
.isEqualTo(ForeignKeyContactIndex.class);
|
||||
assertThat(ClassPathManager.getClass("Modification")).isEqualTo(Modification.class);
|
||||
assertThat(ClassPathManager.getClass("CommitLogCheckpoint"))
|
||||
.isEqualTo(CommitLogCheckpoint.class);
|
||||
assertThat(ClassPathManager.getClass("CommitLogManifest")).isEqualTo(CommitLogManifest.class);
|
||||
assertThat(ClassPathManager.getClass("AllocationToken")).isEqualTo(AllocationToken.class);
|
||||
assertThat(ClassPathManager.getClass("OneTime")).isEqualTo(OneTime.class);
|
||||
assertThat(ClassPathManager.getClass("Cursor")).isEqualTo(Cursor.class);
|
||||
@@ -79,10 +71,7 @@ public class ClassPathManagerTest {
|
||||
assertThat(ClassPathManager.getClass("ContactResource")).isEqualTo(ContactResource.class);
|
||||
assertThat(ClassPathManager.getClass("Cancellation")).isEqualTo(Cancellation.class);
|
||||
assertThat(ClassPathManager.getClass("RegistrarContact")).isEqualTo(RegistrarContact.class);
|
||||
assertThat(ClassPathManager.getClass("CommitLogBucket")).isEqualTo(CommitLogBucket.class);
|
||||
assertThat(ClassPathManager.getClass("LastSqlTransaction")).isEqualTo(LastSqlTransaction.class);
|
||||
assertThat(ClassPathManager.getClass("CommitLogCheckpointRoot"))
|
||||
.isEqualTo(CommitLogCheckpointRoot.class);
|
||||
assertThat(ClassPathManager.getClass("GaeUserIdConverter")).isEqualTo(GaeUserIdConverter.class);
|
||||
assertThat(ClassPathManager.getClass("EppResourceIndexBucket"))
|
||||
.isEqualTo(EppResourceIndexBucket.class);
|
||||
@@ -90,7 +79,6 @@ public class ClassPathManagerTest {
|
||||
assertThat(ClassPathManager.getClass("EntityGroupRoot")).isEqualTo(EntityGroupRoot.class);
|
||||
assertThat(ClassPathManager.getClass("Lock")).isEqualTo(Lock.class);
|
||||
assertThat(ClassPathManager.getClass("DomainBase")).isEqualTo(DomainBase.class);
|
||||
assertThat(ClassPathManager.getClass("CommitLogMutation")).isEqualTo(CommitLogMutation.class);
|
||||
assertThat(ClassPathManager.getClass("HistoryEntry")).isEqualTo(HistoryEntry.class);
|
||||
assertThat(ClassPathManager.getClass("PollMessage")).isEqualTo(PollMessage.class);
|
||||
assertThat(ClassPathManager.getClass("ForeignKeyHostIndex"))
|
||||
@@ -132,10 +120,6 @@ public class ClassPathManagerTest {
|
||||
assertThat(ClassPathManager.getClassName(ForeignKeyContactIndex.class))
|
||||
.isEqualTo("ForeignKeyContactIndex");
|
||||
assertThat(ClassPathManager.getClassName(Modification.class)).isEqualTo("Modification");
|
||||
assertThat(ClassPathManager.getClassName(CommitLogCheckpoint.class))
|
||||
.isEqualTo("CommitLogCheckpoint");
|
||||
assertThat(ClassPathManager.getClassName(CommitLogManifest.class))
|
||||
.isEqualTo("CommitLogManifest");
|
||||
assertThat(ClassPathManager.getClassName(AllocationToken.class)).isEqualTo("AllocationToken");
|
||||
assertThat(ClassPathManager.getClassName(OneTime.class)).isEqualTo("OneTime");
|
||||
assertThat(ClassPathManager.getClassName(Cursor.class)).isEqualTo("Cursor");
|
||||
@@ -147,11 +131,8 @@ public class ClassPathManagerTest {
|
||||
assertThat(ClassPathManager.getClassName(ContactResource.class)).isEqualTo("ContactResource");
|
||||
assertThat(ClassPathManager.getClassName(Cancellation.class)).isEqualTo("Cancellation");
|
||||
assertThat(ClassPathManager.getClassName(RegistrarContact.class)).isEqualTo("RegistrarContact");
|
||||
assertThat(ClassPathManager.getClassName(CommitLogBucket.class)).isEqualTo("CommitLogBucket");
|
||||
assertThat(ClassPathManager.getClassName(LastSqlTransaction.class))
|
||||
.isEqualTo("LastSqlTransaction");
|
||||
assertThat(ClassPathManager.getClassName(CommitLogCheckpointRoot.class))
|
||||
.isEqualTo("CommitLogCheckpointRoot");
|
||||
assertThat(ClassPathManager.getClassName(GaeUserIdConverter.class))
|
||||
.isEqualTo("GaeUserIdConverter");
|
||||
assertThat(ClassPathManager.getClassName(EppResourceIndexBucket.class))
|
||||
@@ -160,8 +141,6 @@ public class ClassPathManagerTest {
|
||||
assertThat(ClassPathManager.getClassName(EntityGroupRoot.class)).isEqualTo("EntityGroupRoot");
|
||||
assertThat(ClassPathManager.getClassName(Lock.class)).isEqualTo("Lock");
|
||||
assertThat(ClassPathManager.getClassName(DomainBase.class)).isEqualTo("DomainBase");
|
||||
assertThat(ClassPathManager.getClassName(CommitLogMutation.class))
|
||||
.isEqualTo("CommitLogMutation");
|
||||
assertThat(ClassPathManager.getClassName(HistoryEntry.class)).isEqualTo("HistoryEntry");
|
||||
assertThat(ClassPathManager.getClassName(PollMessage.class)).isEqualTo("PollMessage");
|
||||
assertThat(ClassPathManager.getClassName(ForeignKeyHostIndex.class))
|
||||
|
||||
@@ -1,83 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.model.index;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.config.RegistryConfig.getEppResourceIndexBucketCount;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveContact;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.Iterables;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.EntityTestCase;
|
||||
import google.registry.model.contact.ContactResource;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link EppResourceIndex}. */
|
||||
class EppResourceIndexTest extends EntityTestCase {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
private ContactResource contact;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
createTld("tld");
|
||||
// The DatabaseHelper here creates the EppResourceIndex for us.
|
||||
contact = persistActiveContact("abcd1357");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPersistence() {
|
||||
EppResourceIndex loadedIndex = Iterables.getOnlyElement(getEppResourceIndexObjects());
|
||||
assertThat(auditedOfy().load().key(loadedIndex.reference).now()).isEqualTo(contact);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testIndexing() throws Exception {
|
||||
verifyDatastoreIndexing(Iterables.getOnlyElement(getEppResourceIndexObjects()), "kind");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testIdempotentOnUpdate() {
|
||||
contact = persistResource(contact.asBuilder().setEmailAddress("abc@def.fake").build());
|
||||
EppResourceIndex loadedIndex = Iterables.getOnlyElement(getEppResourceIndexObjects());
|
||||
assertThat(auditedOfy().load().key(loadedIndex.reference).now()).isEqualTo(contact);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all EppResourceIndex objects across all buckets.
|
||||
*/
|
||||
private static ImmutableList<EppResourceIndex> getEppResourceIndexObjects() {
|
||||
ImmutableList.Builder<EppResourceIndex> indexEntities = new ImmutableList.Builder<>();
|
||||
for (int i = 0; i < getEppResourceIndexBucketCount(); i++) {
|
||||
indexEntities.addAll(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.type(EppResourceIndex.class)
|
||||
.ancestor(Key.create(EppResourceIndexBucket.class, i + 1)));
|
||||
}
|
||||
return indexEntities.build();
|
||||
}
|
||||
}
|
||||
@@ -1,117 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.model.ofy;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.CommitLogBucket.getBucketKey;
|
||||
import static google.registry.model.ofy.CommitLogBucket.loadAllBuckets;
|
||||
import static google.registry.model.ofy.CommitLogBucket.loadBucket;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
import static google.registry.util.DateTimeUtils.END_OF_TIME;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.google.common.base.Suppliers;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.googlecode.objectify.annotation.Cache;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.InjectExtension;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Tests for {@link CommitLogBucket}. */
|
||||
public class CommitLogBucketTest {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
@RegisterExtension public final InjectExtension inject = new InjectExtension();
|
||||
private CommitLogBucket bucket;
|
||||
|
||||
@BeforeEach
|
||||
void before() {
|
||||
// Save the bucket with some non-default properties set so that we can distinguish a correct
|
||||
// load from one that returns a newly created bucket instance.
|
||||
bucket = persistResource(
|
||||
new CommitLogBucket.Builder()
|
||||
.setLastWrittenTime(END_OF_TIME)
|
||||
.setBucketNum(1)
|
||||
.build());
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_getBucketKey_createsBucketKeyInDefaultNamespace() {
|
||||
// Key.getNamespace() returns the empty string for the default namespace, not null.
|
||||
assertThat(getBucketKey(1).getRaw().getNamespace()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_getBucketKey_bucketNumberTooLow_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(IllegalArgumentException.class, () -> getBucketKey(0));
|
||||
assertThat(thrown).hasMessageThat().contains("0 not in [");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_getBucketKey_bucketNumberTooHigh_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(IllegalArgumentException.class, () -> getBucketKey(11));
|
||||
assertThat(thrown).hasMessageThat().contains("11 not in [");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_getArbitraryBucketId_withSupplierOverridden() {
|
||||
inject.setStaticField(
|
||||
CommitLogBucket.class, "bucketIdSupplier", Suppliers.ofInstance(4)); // xkcd.com/221
|
||||
// Try multiple times just in case it's actually still random. If it is, the probability of
|
||||
// this test passing is googol^-1, so I think we're pretty safe.
|
||||
for (int i = 0; i < 100; i++) {
|
||||
assertThat(CommitLogBucket.getArbitraryBucketId()).isEqualTo(4);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_loadBucket_loadsTheBucket() {
|
||||
assertThat(loadBucket(getBucketKey(1))).isEqualTo(bucket);
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_loadBucket_forNonexistentBucket_returnsNewBucket() {
|
||||
assertThat(loadBucket(getBucketKey(3))).isEqualTo(
|
||||
new CommitLogBucket.Builder().setBucketNum(3).build());
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_loadAllBuckets_loadsExistingBuckets_orNewOnesIfNonexistent() {
|
||||
ImmutableSet<CommitLogBucket> buckets = loadAllBuckets();
|
||||
assertThat(buckets).hasSize(3);
|
||||
assertThat(buckets).contains(bucket);
|
||||
assertThat(buckets).contains(new CommitLogBucket.Builder().setBucketNum(2).build());
|
||||
assertThat(buckets).contains(new CommitLogBucket.Builder().setBucketNum(3).build());
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_noCacheAnnotation() {
|
||||
// Don't ever put @Cache on CommitLogBucket; it could mess up the checkpointing algorithm.
|
||||
assertThat(CommitLogBucket.class.isAnnotationPresent(Cache.class)).isFalse();
|
||||
}
|
||||
}
|
||||
@@ -1,95 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.model.ofy;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.util.DateTimeUtils.START_OF_TIME;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Tests for {@link CommitLogCheckpoint}. */
|
||||
public class CommitLogCheckpointTest {
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
private static final DateTime T1 = START_OF_TIME;
|
||||
private static final DateTime T2 = START_OF_TIME.plusMillis(1);
|
||||
private static final DateTime T3 = START_OF_TIME.plusMillis(2);
|
||||
|
||||
@Test
|
||||
void test_getCheckpointTime() {
|
||||
DateTime now = DateTime.now(UTC);
|
||||
CommitLogCheckpoint checkpoint =
|
||||
CommitLogCheckpoint.create(now, ImmutableMap.of(1, T1, 2, T2, 3, T3));
|
||||
assertThat(checkpoint.getCheckpointTime()).isEqualTo(now);
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_getBucketTimestamps() {
|
||||
CommitLogCheckpoint checkpoint =
|
||||
CommitLogCheckpoint.create(DateTime.now(UTC), ImmutableMap.of(1, T1, 2, T2, 3, T3));
|
||||
assertThat(checkpoint.getBucketTimestamps()).containsExactly(1, T1, 2, T2, 3, T3);
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_create_notEnoughBucketTimestamps_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> CommitLogCheckpoint.create(DateTime.now(UTC), ImmutableMap.of(1, T1, 2, T2)));
|
||||
assertThat(thrown).hasMessageThat().contains("Bucket ids are incorrect");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_create_tooManyBucketTimestamps_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
CommitLogCheckpoint.create(
|
||||
DateTime.now(UTC), ImmutableMap.of(1, T1, 2, T2, 3, T3, 4, T1)));
|
||||
assertThat(thrown).hasMessageThat().contains("Bucket ids are incorrect");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_create_wrongBucketIds_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
CommitLogCheckpoint.create(
|
||||
DateTime.now(UTC), ImmutableMap.of(0, T1, 1, T2, 2, T3)));
|
||||
assertThat(thrown).hasMessageThat().contains("Bucket ids are incorrect");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_create_wrongBucketIdOrder_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
CommitLogCheckpoint.create(
|
||||
DateTime.now(UTC), ImmutableMap.of(2, T2, 1, T1, 3, T3)));
|
||||
assertThat(thrown).hasMessageThat().contains("Bucket ids are incorrect");
|
||||
}
|
||||
}
|
||||
@@ -1,90 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.model.ofy;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
|
||||
import com.google.appengine.api.datastore.Entity;
|
||||
import com.google.appengine.api.datastore.EntityTranslator;
|
||||
import com.google.appengine.api.datastore.KeyFactory;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.tld.Registry;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Tests for {@link CommitLogMutation}. */
|
||||
public class CommitLogMutationTest {
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
private static final DateTime NOW = DateTime.now(DateTimeZone.UTC);
|
||||
|
||||
private Key<CommitLogManifest> manifestKey;
|
||||
private ImmutableObject someObject;
|
||||
|
||||
@BeforeEach
|
||||
void before() {
|
||||
// Initialize this late to avoid dependency on NamespaceManager prior to AppEngineExtension.
|
||||
manifestKey = CommitLogManifest.createKey(CommitLogBucket.getBucketKey(1), NOW);
|
||||
createTld("tld");
|
||||
someObject = Registry.get("tld");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_createKey_createsKeyWithWebsafeKeystring() {
|
||||
Key<CommitLogMutation> mutationKey =
|
||||
CommitLogMutation.createKey(manifestKey, Key.create(someObject));
|
||||
assertThat(mutationKey.getParent()).isEqualTo(manifestKey);
|
||||
assertThat(mutationKey.getName())
|
||||
.isEqualTo(KeyFactory.keyToString(Key.create(someObject).getRaw()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_create_createsExpectedMutation() {
|
||||
Entity rawEntity = convertToEntityInTxn(someObject);
|
||||
// Needs to be in a transaction so that registry-saving-to-entity will work.
|
||||
CommitLogMutation mutation =
|
||||
auditedOfy().transact(() -> CommitLogMutation.create(manifestKey, someObject));
|
||||
assertThat(Key.create(mutation))
|
||||
.isEqualTo(CommitLogMutation.createKey(manifestKey, Key.create(someObject)));
|
||||
assertThat(mutation.getEntity()).isEqualTo(rawEntity);
|
||||
assertThat(EntityTranslator.createFromPbBytes(mutation.getEntityProtoBytes()))
|
||||
.isEqualTo(rawEntity);
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_createRaw_createsExpectedMutation() {
|
||||
Entity rawEntity = convertToEntityInTxn(someObject);
|
||||
CommitLogMutation mutation = CommitLogMutation.createFromRaw(manifestKey, rawEntity);
|
||||
assertThat(Key.create(mutation))
|
||||
.isEqualTo(CommitLogMutation.createKey(manifestKey, Key.create(someObject)));
|
||||
assertThat(mutation.getEntity()).isEqualTo(rawEntity);
|
||||
assertThat(EntityTranslator.createFromPbBytes(mutation.getEntityProtoBytes()))
|
||||
.isEqualTo(rawEntity);
|
||||
}
|
||||
|
||||
private static Entity convertToEntityInTxn(final ImmutableObject object) {
|
||||
return auditedOfy().transact(() -> auditedOfy().save().toEntity(object));
|
||||
}
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
// Copyright 2020 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.model.ofy;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.ofy.TransactionInfo.Delete;
|
||||
import google.registry.model.registrar.Registrar;
|
||||
import google.registry.model.reporting.HistoryEntry;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
class EntityWritePrioritiesTest {
|
||||
|
||||
@RegisterExtension
|
||||
AppEngineExtension appEngine =
|
||||
new AppEngineExtension.Builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
@Test
|
||||
void testGetPriority() {
|
||||
// Just verify that the lowest priority is what we expect for both save and delete, and that the
|
||||
// Registrar class's priority is zero.
|
||||
ImmutableMap<Key<?>, Object> actions =
|
||||
ImmutableMap.of(
|
||||
Key.create(HistoryEntry.class, 100), TransactionInfo.Delete.SENTINEL,
|
||||
Key.create(HistoryEntry.class, 200), "fake history entry",
|
||||
Key.create(Registrar.class, 300), "fake registrar");
|
||||
ImmutableMap<Long, Integer> expectedValues =
|
||||
ImmutableMap.of(100L, EntityWritePriorities.DELETE_RANGE - 20, 200L, 20, 300L, 0);
|
||||
|
||||
for (ImmutableMap.Entry<Key<?>, Object> entry : actions.entrySet()) {
|
||||
assertThat(
|
||||
EntityWritePriorities.getEntityPriority(
|
||||
entry.getKey().getKind(), Delete.SENTINEL.equals(entry.getValue())))
|
||||
.isEqualTo(expectedValues.get(entry.getKey().getId()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,480 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.model.ofy;
|
||||
|
||||
import static com.google.appengine.api.datastore.EntityTranslator.convertToPb;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.common.EntityGroupRoot.getCrossTldKey;
|
||||
import static google.registry.model.ofy.CommitLogBucket.getBucketKey;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.googlecode.objectify.Key;
|
||||
import com.googlecode.objectify.annotation.Entity;
|
||||
import com.googlecode.objectify.annotation.Id;
|
||||
import com.googlecode.objectify.annotation.Parent;
|
||||
import google.registry.model.BackupGroupRoot;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.common.EntityGroupRoot;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.FakeClock;
|
||||
import google.registry.testing.InjectExtension;
|
||||
import google.registry.testing.TestObject.TestVirtualObject;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests ensuring {@link Ofy} saves transactions to {@link CommitLogManifest}. */
|
||||
public class OfyCommitLogTest {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder()
|
||||
.withDatastoreAndCloudSql()
|
||||
.withOfyTestEntities(TestVirtualObject.class, Root.class, Child.class)
|
||||
.build();
|
||||
|
||||
@RegisterExtension public final InjectExtension inject = new InjectExtension();
|
||||
|
||||
private final FakeClock clock = new FakeClock(DateTime.parse("2000-01-01TZ"));
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
inject.setStaticField(Ofy.class, "clock", clock);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_doesNothing_noCommitLogIsSaved() {
|
||||
tm().transact(() -> {});
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class)).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_savesDataAndCommitLog() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())).now());
|
||||
assertThat(auditedOfy().load().key(Key.create(getCrossTldKey(), Root.class, 1)).now().value)
|
||||
.isEqualTo("value");
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class)).hasSize(1);
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class)).hasSize(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_saveWithoutBackup_noCommitLogIsSaved() {
|
||||
tm().transact(
|
||||
() -> auditedOfy().saveWithoutBackup().entity(Root.create(1, getCrossTldKey())).now());
|
||||
assertThat(auditedOfy().load().key(Key.create(getCrossTldKey(), Root.class, 1)).now().value)
|
||||
.isEqualTo("value");
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class)).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class)).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_deleteWithoutBackup_noCommitLogIsSaved() {
|
||||
tm().transact(
|
||||
() -> auditedOfy().saveWithoutBackup().entity(Root.create(1, getCrossTldKey())).now());
|
||||
tm().transact(() -> auditedOfy().deleteWithoutBackup().key(Key.create(Root.class, 1)));
|
||||
assertThat(auditedOfy().load().key(Key.create(Root.class, 1)).now()).isNull();
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class)).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class)).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_savesEntity_itsProtobufFormIsStoredInCommitLog() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())).now());
|
||||
final byte[] entityProtoBytes =
|
||||
auditedOfy().load().type(CommitLogMutation.class).first().now().entityProtoBytes;
|
||||
// This transaction is needed so that save().toEntity() can access
|
||||
// auditedOfy().getTransactionTime()
|
||||
// when it attempts to set the update timestamp.
|
||||
tm().transact(
|
||||
() ->
|
||||
assertThat(entityProtoBytes)
|
||||
.isEqualTo(
|
||||
convertToPb(auditedOfy().save().toEntity(Root.create(1, getCrossTldKey())))
|
||||
.toByteArray()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_savesEntity_mutationIsChildOfManifest() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())).now());
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.type(CommitLogMutation.class)
|
||||
.ancestor(auditedOfy().load().type(CommitLogManifest.class).first().now()))
|
||||
.hasSize(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransactNew_savesDataAndCommitLog() {
|
||||
tm().transactNew(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())).now());
|
||||
assertThat(auditedOfy().load().key(Key.create(getCrossTldKey(), Root.class, 1)).now().value)
|
||||
.isEqualTo("value");
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class)).hasSize(1);
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class)).hasSize(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_multipleSaves_logsMultipleMutations() {
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(Root.create(1, getCrossTldKey())).now();
|
||||
auditedOfy().save().entity(Root.create(2, getCrossTldKey())).now();
|
||||
});
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class)).hasSize(1);
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class)).hasSize(2);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_deletion_deletesAndLogsWithoutMutation() {
|
||||
tm().transact(
|
||||
() -> auditedOfy().saveWithoutBackup().entity(Root.create(1, getCrossTldKey())).now());
|
||||
clock.advanceOneMilli();
|
||||
final Key<Root> otherTldKey = Key.create(getCrossTldKey(), Root.class, 1);
|
||||
tm().transact(() -> auditedOfy().delete().key(otherTldKey));
|
||||
assertThat(auditedOfy().load().key(otherTldKey).now()).isNull();
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class)).hasSize(1);
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class)).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).first().now().getDeletions())
|
||||
.containsExactly(otherTldKey);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransactNew_deleteNotBackedUpKind_throws() {
|
||||
final CommitLogManifest backupsArentAllowedOnMe =
|
||||
CommitLogManifest.create(getBucketKey(1), clock.nowUtc(), ImmutableSet.of());
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> tm().transactNew(() -> auditedOfy().delete().entity(backupsArentAllowedOnMe)));
|
||||
assertThat(thrown).hasMessageThat().contains("Can't save/delete a @NotBackedUp");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransactNew_saveNotBackedUpKind_throws() {
|
||||
final CommitLogManifest backupsArentAllowedOnMe =
|
||||
CommitLogManifest.create(getBucketKey(1), clock.nowUtc(), ImmutableSet.of());
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> tm().transactNew(() -> auditedOfy().save().entity(backupsArentAllowedOnMe)));
|
||||
assertThat(thrown).hasMessageThat().contains("Can't save/delete a @NotBackedUp");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransactNew_deleteVirtualEntityKey_throws() {
|
||||
final Key<TestVirtualObject> virtualEntityKey = TestVirtualObject.createKey("virtual");
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> tm().transactNew(() -> auditedOfy().delete().key(virtualEntityKey)));
|
||||
assertThat(thrown).hasMessageThat().contains("Can't save/delete a @VirtualEntity");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransactNew_saveVirtualEntity_throws() {
|
||||
final TestVirtualObject virtualEntity = TestVirtualObject.create("virtual");
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> tm().transactNew(() -> auditedOfy().save().entity(virtualEntity)));
|
||||
assertThat(thrown).hasMessageThat().contains("Can't save/delete a @VirtualEntity");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_deleteWithoutBackup_withVirtualEntityKey_throws() {
|
||||
final Key<TestVirtualObject> virtualEntityKey = TestVirtualObject.createKey("virtual");
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> auditedOfy().deleteWithoutBackup().key(virtualEntityKey));
|
||||
assertThat(thrown).hasMessageThat().contains("Can't save/delete a @VirtualEntity");
|
||||
}
|
||||
|
||||
@Test
|
||||
void test_saveWithoutBackup_withVirtualEntity_throws() {
|
||||
final TestVirtualObject virtualEntity = TestVirtualObject.create("virtual");
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> auditedOfy().saveWithoutBackup().entity(virtualEntity));
|
||||
assertThat(thrown).hasMessageThat().contains("Can't save/delete a @VirtualEntity");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_twoSavesOnSameKey_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(Root.create(1, getCrossTldKey()));
|
||||
auditedOfy().save().entity(Root.create(1, getCrossTldKey()));
|
||||
}));
|
||||
assertThat(thrown).hasMessageThat().contains("Multiple entries with same key");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testTransact_saveAndDeleteSameKey_throws() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(Root.create(1, getCrossTldKey()));
|
||||
auditedOfy().delete().entity(Root.create(1, getCrossTldKey()));
|
||||
}));
|
||||
assertThat(thrown).hasMessageThat().contains("Multiple entries with same key");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSavingRootAndChild_updatesTimestampOnBackupGroupRoot() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())));
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
clock.advanceOneMilli();
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(Root.create(1, getCrossTldKey()));
|
||||
auditedOfy().save().entity(new Child());
|
||||
});
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSavingOnlyChild_updatesTimestampOnBackupGroupRoot() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())));
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
clock.advanceOneMilli();
|
||||
tm().transact(() -> auditedOfy().save().entity(new Child()));
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDeletingChild_updatesTimestampOnBackupGroupRoot() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())));
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
clock.advanceOneMilli();
|
||||
// The fact that the child was never persisted is irrelevant.
|
||||
tm().transact(() -> auditedOfy().delete().entity(new Child()));
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testReadingRoot_doesntUpdateTimestamp() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())));
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
clock.advanceOneMilli();
|
||||
tm().transact(
|
||||
() -> {
|
||||
// Don't remove this line, as without saving *something* the commit log code will
|
||||
// never be invoked and the test will trivially pass.
|
||||
auditedOfy().save().entity(Root.create(2, getCrossTldKey()));
|
||||
auditedOfy().load().entity(Root.create(1, getCrossTldKey()));
|
||||
});
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc().minusMillis(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testReadingChild_doesntUpdateTimestampOnBackupGroupRoot() {
|
||||
tm().transact(() -> auditedOfy().save().entity(Root.create(1, getCrossTldKey())));
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
clock.advanceOneMilli();
|
||||
tm().transact(
|
||||
() -> {
|
||||
// Don't remove this line, as without saving *something* the commit log code will
|
||||
// never be invoked and the test will trivially pass.
|
||||
auditedOfy().save().entity(Root.create(2, getCrossTldKey()));
|
||||
auditedOfy().load().entity(new Child()); // All Child objects are under Root(1).
|
||||
});
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc().minusMillis(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSavingAcrossBackupGroupRoots_updatesCorrectTimestamps() {
|
||||
// Create three roots.
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(Root.create(1, getCrossTldKey()));
|
||||
auditedOfy().save().entity(Root.create(2, getCrossTldKey()));
|
||||
auditedOfy().save().entity(Root.create(3, getCrossTldKey()));
|
||||
});
|
||||
auditedOfy().clearSessionCache();
|
||||
for (int i = 1; i <= 3; i++) {
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, i))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
}
|
||||
clock.advanceOneMilli();
|
||||
// Mutate one root, and a child of a second, ignoring the third.
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(new Child()); // All Child objects are under Root(1).
|
||||
auditedOfy().save().entity(Root.create(2, getCrossTldKey()));
|
||||
});
|
||||
auditedOfy().clearSessionCache();
|
||||
// Child was touched.
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 1))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
// Directly touched.
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 2))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc());
|
||||
// Wasn't touched.
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Key.create(getCrossTldKey(), Root.class, 3))
|
||||
.now()
|
||||
.getUpdateTimestamp()
|
||||
.getTimestamp())
|
||||
.isEqualTo(clock.nowUtc().minusMillis(1));
|
||||
}
|
||||
|
||||
@Entity
|
||||
static class Root extends BackupGroupRoot {
|
||||
|
||||
@Parent
|
||||
Key<EntityGroupRoot> parent;
|
||||
|
||||
@Id
|
||||
long id;
|
||||
|
||||
String value;
|
||||
|
||||
static Root create(long id, Key<EntityGroupRoot> parent) {
|
||||
Root result = new Root();
|
||||
result.parent = parent;
|
||||
result.id = id;
|
||||
result.value = "value";
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
static class Child extends ImmutableObject {
|
||||
@Parent
|
||||
Key<Root> parent = Key.create(Root.create(1, getCrossTldKey()));
|
||||
|
||||
@Id
|
||||
long id = 1;
|
||||
}
|
||||
}
|
||||
@@ -90,89 +90,6 @@ public class OfyTest {
|
||||
// This can't be initialized earlier because namespaces need the AppEngineExtension to work.
|
||||
}
|
||||
|
||||
private void doBackupGroupRootTimestampInversionTest(Runnable runnable) {
|
||||
DateTime groupTimestamp =
|
||||
auditedOfy().load().key(someObject.getParent()).now().getUpdateTimestamp().getTimestamp();
|
||||
// Set the clock in Ofy to the same time as the backup group root's save time.
|
||||
Ofy ofy = new Ofy(new FakeClock(groupTimestamp));
|
||||
TimestampInversionException thrown =
|
||||
assertThrows(TimestampInversionException.class, () -> ofy.transact(runnable));
|
||||
assertThat(thrown)
|
||||
.hasMessageThat()
|
||||
.contains(
|
||||
String.format(
|
||||
"Timestamp inversion between transaction time (%s) and entities rooted under:\n"
|
||||
+ "{Key<?>(ContactResource(\"2-ROID\"))=%s}",
|
||||
groupTimestamp, groupTimestamp));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testBackupGroupRootTimestampsMustIncreaseOnSave() {
|
||||
doBackupGroupRootTimestampInversionTest(
|
||||
() -> auditedOfy().save().entity(someObject.asHistoryEntry()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testBackupGroupRootTimestampsMustIncreaseOnDelete() {
|
||||
doBackupGroupRootTimestampInversionTest(() -> auditedOfy().delete().entity(someObject));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSavingKeyTwice() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(someObject.asHistoryEntry());
|
||||
auditedOfy().save().entity(someObject.asHistoryEntry());
|
||||
}));
|
||||
assertThat(thrown).hasMessageThat().contains("Multiple entries with same key");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDeletingKeyTwice() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().delete().entity(someObject);
|
||||
auditedOfy().delete().entity(someObject);
|
||||
}));
|
||||
assertThat(thrown).hasMessageThat().contains("Multiple entries with same key");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSaveDeleteKey() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().save().entity(someObject.asHistoryEntry());
|
||||
auditedOfy().delete().entity(someObject);
|
||||
}));
|
||||
assertThat(thrown).hasMessageThat().contains("Multiple entries with same key");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testDeleteSaveKey() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class,
|
||||
() ->
|
||||
tm().transact(
|
||||
() -> {
|
||||
auditedOfy().delete().entity(someObject);
|
||||
auditedOfy().save().entity(someObject.asHistoryEntry());
|
||||
}));
|
||||
assertThat(thrown).hasMessageThat().contains("Multiple entries with same key");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSavingKeyTwiceInOneCall() {
|
||||
assertThrows(
|
||||
|
||||
@@ -1,183 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.model.translators;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
|
||||
import static org.joda.time.Duration.standardDays;
|
||||
import static org.joda.time.Duration.standardHours;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableSortedMap;
|
||||
import com.googlecode.objectify.Key;
|
||||
import com.googlecode.objectify.annotation.Entity;
|
||||
import google.registry.model.common.CrossTldSingleton;
|
||||
import google.registry.model.ofy.CommitLogManifest;
|
||||
import google.registry.model.ofy.Ofy;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.FakeClock;
|
||||
import google.registry.testing.InjectExtension;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import java.util.List;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link CommitLogRevisionsTranslatorFactory}. */
|
||||
public class CommitLogRevisionsTranslatorFactoryTest {
|
||||
|
||||
private static final DateTime START_TIME = DateTime.parse("2000-01-01TZ");
|
||||
|
||||
@Entity(name = "ClrtfTestEntity")
|
||||
public static class TestObject extends CrossTldSingleton {
|
||||
ImmutableSortedMap<DateTime, Key<CommitLogManifest>> revisions = ImmutableSortedMap.of();
|
||||
}
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder()
|
||||
.withDatastoreAndCloudSql()
|
||||
.withOfyTestEntities(TestObject.class)
|
||||
.build();
|
||||
|
||||
@RegisterExtension public final InjectExtension inject = new InjectExtension();
|
||||
|
||||
private final FakeClock clock = new FakeClock(START_TIME);
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
inject.setStaticField(Ofy.class, "clock", clock);
|
||||
}
|
||||
|
||||
private void save(final TestObject object) {
|
||||
tm().transact(() -> auditedOfy().save().entity(object));
|
||||
}
|
||||
|
||||
private TestObject reload() {
|
||||
auditedOfy().clearSessionCache();
|
||||
return auditedOfy().load().entity(new TestObject()).now();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSave_doesNotMutateOriginalResource() {
|
||||
TestObject object = new TestObject();
|
||||
save(object);
|
||||
assertThat(object.revisions).isEmpty();
|
||||
assertThat(reload().revisions).isNotEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSave_translatorAddsKeyToCommitLogToField() {
|
||||
save(new TestObject());
|
||||
TestObject object = reload();
|
||||
assertThat(object.revisions).hasSize(1);
|
||||
assertThat(object.revisions).containsKey(START_TIME);
|
||||
CommitLogManifest commitLogManifest =
|
||||
auditedOfy().load().key(object.revisions.get(START_TIME)).now();
|
||||
assertThat(commitLogManifest.getCommitTime()).isEqualTo(START_TIME);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSave_twoVersionsOnOneDay_keyToLastCommitLogsGetsStored() {
|
||||
save(new TestObject());
|
||||
clock.advanceBy(standardHours(1));
|
||||
save(reload());
|
||||
TestObject object = reload();
|
||||
assertThat(object.revisions).hasSize(1);
|
||||
assertThat(object.revisions).containsKey(START_TIME.plusHours(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSave_twoVersionsOnTwoDays_keyToBothCommitLogsGetsStored() {
|
||||
save(new TestObject());
|
||||
clock.advanceBy(standardDays(1));
|
||||
save(reload());
|
||||
TestObject object = reload();
|
||||
assertThat(object.revisions).hasSize(2);
|
||||
assertThat(object.revisions).containsKey(START_TIME);
|
||||
assertThat(object.revisions).containsKey(START_TIME.plusDays(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSave_moreThanThirtyDays_truncatedAtThirtyPlusOne() {
|
||||
save(new TestObject());
|
||||
for (int i = 0; i < 35; i++) {
|
||||
clock.advanceBy(standardDays(1));
|
||||
save(reload());
|
||||
}
|
||||
TestObject object = reload();
|
||||
assertThat(object.revisions).hasSize(31);
|
||||
assertThat(object.revisions.firstKey()).isEqualTo(clock.nowUtc().minusDays(30));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSave_moreThanThirtySparse_keepsOneEntryPrecedingThirtyDays() {
|
||||
save(new TestObject());
|
||||
assertThat(reload().revisions).hasSize(1);
|
||||
assertThat(reload().revisions.firstKey()).isEqualTo(clock.nowUtc().minusDays(0));
|
||||
clock.advanceBy(standardDays(29));
|
||||
save(reload());
|
||||
assertThat(reload().revisions).hasSize(2);
|
||||
assertThat(reload().revisions.firstKey()).isEqualTo(clock.nowUtc().minusDays(29));
|
||||
clock.advanceBy(standardDays(29));
|
||||
save(reload());
|
||||
assertThat(reload().revisions).hasSize(3);
|
||||
assertThat(reload().revisions.firstKey()).isEqualTo(clock.nowUtc().minusDays(58));
|
||||
clock.advanceBy(standardDays(29));
|
||||
save(reload());
|
||||
assertThat(reload().revisions).hasSize(3);
|
||||
assertThat(reload().revisions.firstKey()).isEqualTo(clock.nowUtc().minusDays(58));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
void testRawEntityLayout() {
|
||||
save(new TestObject());
|
||||
clock.advanceBy(standardDays(1));
|
||||
com.google.appengine.api.datastore.Entity entity =
|
||||
tm().transactNewReadOnly(() -> auditedOfy().save().toEntity(reload()));
|
||||
assertThat(entity.getProperties().keySet()).containsExactly("revisions.key", "revisions.value");
|
||||
assertThat(entity.getProperties())
|
||||
.containsEntry(
|
||||
"revisions.key",
|
||||
ImmutableList.of(START_TIME.toDate(), START_TIME.plusDays(1).toDate()));
|
||||
assertThat(entity.getProperty("revisions.value")).isInstanceOf(List.class);
|
||||
assertThat(((List<Object>) entity.getProperty("revisions.value")).get(0))
|
||||
.isInstanceOf(com.google.appengine.api.datastore.Key.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testLoad_neverSaved_returnsNull() {
|
||||
assertThat(auditedOfy().load().entity(new TestObject()).now()).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testLoad_missingRevisionRawProperties_createsEmptyObject() {
|
||||
com.google.appengine.api.datastore.Entity entity =
|
||||
tm().transactNewReadOnly(() -> auditedOfy().save().toEntity(new TestObject()));
|
||||
entity.removeProperty("revisions.key");
|
||||
entity.removeProperty("revisions.value");
|
||||
TestObject object = auditedOfy().load().fromEntity(entity);
|
||||
assertThat(object.revisions).isNotNull();
|
||||
assertThat(object.revisions).isEmpty();
|
||||
}
|
||||
}
|
||||
@@ -17,14 +17,11 @@ package google.registry.model.translators;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.testing.DatabaseHelper.newDomainBase;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveContact;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.billing.BillingEvent;
|
||||
import google.registry.model.common.ClassPathManager;
|
||||
import google.registry.model.domain.DomainBase;
|
||||
import google.registry.model.ofy.CommitLogCheckpoint;
|
||||
import google.registry.model.ofy.CommitLogCheckpointRoot;
|
||||
import google.registry.model.reporting.HistoryEntry;
|
||||
import google.registry.persistence.VKey;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
@@ -62,20 +59,6 @@ public class VKeyTranslatorFactoryTest {
|
||||
assertThat(vkey.getSqlKey()).isEqualTo("ROID-1");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKeyWithParent() {
|
||||
Key<CommitLogCheckpointRoot> parent = Key.create(CommitLogCheckpointRoot.class, "parent");
|
||||
Key<CommitLogCheckpoint> key = Key.create(parent, CommitLogCheckpoint.class, "foo");
|
||||
IllegalArgumentException e =
|
||||
assertThrows(IllegalArgumentException.class, () -> VKeyTranslatorFactory.createVKey(key));
|
||||
assertThat(e)
|
||||
.hasMessageThat()
|
||||
.isEqualTo(
|
||||
"Cannot auto-convert key Key<?>(CommitLogCheckpointRoot(\"parent\")/"
|
||||
+ "CommitLogCheckpoint(\"foo\")) of kind CommitLogCheckpoint because it has a "
|
||||
+ "parent. Add a createVKey(Key) method for it.");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testEntityWithAncestor() {
|
||||
Key<DomainBase> domainKey = Key.create(DomainBase.class, "ROID-1");
|
||||
|
||||
@@ -18,7 +18,7 @@ import static com.google.common.io.BaseEncoding.base16;
|
||||
import static google.registry.testing.DatabaseHelper.generateNewContactHostRoid;
|
||||
import static google.registry.testing.DatabaseHelper.generateNewDomainRoid;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistResourceWithCommitLog;
|
||||
import static google.registry.testing.DatabaseHelper.persistResourceWithBackup;
|
||||
import static google.registry.testing.DatabaseHelper.persistSimpleResource;
|
||||
import static google.registry.util.DateTimeUtils.END_OF_TIME;
|
||||
import static org.joda.money.CurrencyUnit.USD;
|
||||
@@ -76,7 +76,7 @@ final class RdeFixtures {
|
||||
.build());
|
||||
clock.advanceOneMilli();
|
||||
BillingEvent.OneTime billingEvent =
|
||||
persistResourceWithCommitLog(
|
||||
persistResourceWithBackup(
|
||||
new BillingEvent.OneTime.Builder()
|
||||
.setReason(Reason.CREATE)
|
||||
.setTargetId("example." + tld)
|
||||
@@ -215,13 +215,13 @@ final class RdeFixtures {
|
||||
.build())
|
||||
.build();
|
||||
clock.advanceOneMilli();
|
||||
return persistResourceWithCommitLog(domain);
|
||||
return persistResourceWithBackup(domain);
|
||||
}
|
||||
|
||||
static ContactResource makeContactResource(
|
||||
FakeClock clock, String id, String name, String email) {
|
||||
clock.advanceOneMilli();
|
||||
return persistResourceWithCommitLog(
|
||||
return persistResourceWithBackup(
|
||||
new ContactResource.Builder()
|
||||
.setContactId(id)
|
||||
.setRepoId(generateNewContactHostRoid())
|
||||
@@ -252,7 +252,7 @@ final class RdeFixtures {
|
||||
|
||||
static HostResource makeHostResource(FakeClock clock, String fqhn, String ip) {
|
||||
clock.advanceOneMilli();
|
||||
return persistResourceWithCommitLog(
|
||||
return persistResourceWithBackup(
|
||||
new HostResource.Builder()
|
||||
.setRepoId(generateNewContactHostRoid())
|
||||
.setCreationRegistrarId("LawyerCat")
|
||||
|
||||
@@ -1,885 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.rde;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.common.Cursor.CursorType.BRDA;
|
||||
import static google.registry.model.common.Cursor.CursorType.RDE_STAGING;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
|
||||
import static google.registry.rde.RdeFixtures.makeContactResource;
|
||||
import static google.registry.rde.RdeFixtures.makeDomainBase;
|
||||
import static google.registry.rde.RdeFixtures.makeHostResource;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistResourceWithCommitLog;
|
||||
import static google.registry.testing.TaskQueueHelper.assertAtLeastOneTaskIsEnqueued;
|
||||
import static google.registry.testing.TaskQueueHelper.assertNoTasksEnqueued;
|
||||
import static google.registry.testing.TestDataHelper.loadFile;
|
||||
import static google.registry.tldconfig.idn.IdnTableEnum.EXTENDED_LATIN;
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.google.cloud.storage.BlobId;
|
||||
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.net.InetAddresses;
|
||||
import google.registry.gcs.GcsUtils;
|
||||
import google.registry.keyring.api.Keyring;
|
||||
import google.registry.keyring.api.PgpHelper;
|
||||
import google.registry.model.common.Cursor;
|
||||
import google.registry.model.common.Cursor.CursorType;
|
||||
import google.registry.model.host.HostResource;
|
||||
import google.registry.model.ofy.Ofy;
|
||||
import google.registry.model.tld.Registry;
|
||||
import google.registry.request.HttpException.BadRequestException;
|
||||
import google.registry.request.RequestParameters;
|
||||
import google.registry.testing.CloudTasksHelper;
|
||||
import google.registry.testing.CloudTasksHelper.TaskMatcher;
|
||||
import google.registry.testing.FakeClock;
|
||||
import google.registry.testing.FakeKeyringModule;
|
||||
import google.registry.testing.FakeLockHandler;
|
||||
import google.registry.testing.FakeResponse;
|
||||
import google.registry.testing.InjectExtension;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import google.registry.testing.mapreduce.MapreduceTestCase;
|
||||
import google.registry.tldconfig.idn.IdnTableEnum;
|
||||
import google.registry.xjc.XjcXmlTransformer;
|
||||
import google.registry.xjc.rde.XjcRdeContentType;
|
||||
import google.registry.xjc.rde.XjcRdeDeposit;
|
||||
import google.registry.xjc.rde.XjcRdeDepositTypeType;
|
||||
import google.registry.xjc.rdedomain.XjcRdeDomain;
|
||||
import google.registry.xjc.rdeheader.XjcRdeHeader;
|
||||
import google.registry.xjc.rdeheader.XjcRdeHeaderCount;
|
||||
import google.registry.xjc.rdehost.XjcRdeHost;
|
||||
import google.registry.xjc.rdeidn.XjcRdeIdn;
|
||||
import google.registry.xjc.rderegistrar.XjcRdeRegistrar;
|
||||
import google.registry.xml.XmlException;
|
||||
import google.registry.xml.XmlTestUtils;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import javax.xml.bind.JAXBElement;
|
||||
import org.bouncycastle.openpgp.PGPException;
|
||||
import org.bouncycastle.openpgp.PGPPrivateKey;
|
||||
import org.bouncycastle.openpgp.PGPPublicKey;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeConstants;
|
||||
import org.joda.time.Duration;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link RdeStagingAction} in Datastore. */
|
||||
public class RdeStagingActionDatastoreTest extends MapreduceTestCase<RdeStagingAction> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
private static final BlobId XML_FILE =
|
||||
BlobId.of("rde-bucket", "lol_2000-01-01_full_S1_R0.xml.ghostryde");
|
||||
private static final BlobId LENGTH_FILE =
|
||||
BlobId.of("rde-bucket", "lol_2000-01-01_full_S1_R0.xml.length");
|
||||
|
||||
@RegisterExtension public final InjectExtension inject = new InjectExtension();
|
||||
|
||||
/**
|
||||
* Without autoIncrement mode, the fake clock won't advance between Mapper and Reducer
|
||||
* transactions when action is invoked, resulting in rolled back reducer transaction (due to
|
||||
* TimestampInversionException if both transactions are mapped to the same CommitLog bucket) and
|
||||
* multiple RdeUplaod/BrdaCopy tasks being enqueued (due to transaction retries, since Cloud Tasks
|
||||
* enqueuing is not transactional with Datastore transactions).
|
||||
*/
|
||||
private final FakeClock clock = new FakeClock().setAutoIncrementByOneMilli();
|
||||
|
||||
private final FakeResponse response = new FakeResponse();
|
||||
private final GcsUtils gcsUtils = new GcsUtils(LocalStorageHelper.getOptions());
|
||||
private final List<? super XjcRdeContentType> alreadyExtracted = new ArrayList<>();
|
||||
private final CloudTasksHelper cloudTasksHelper = new CloudTasksHelper();
|
||||
|
||||
private static PGPPublicKey encryptKey;
|
||||
private static PGPPrivateKey decryptKey;
|
||||
|
||||
@BeforeAll
|
||||
static void beforeAll() {
|
||||
try (Keyring keyring = new FakeKeyringModule().get()) {
|
||||
encryptKey = keyring.getRdeStagingEncryptionKey();
|
||||
decryptKey = keyring.getRdeStagingDecryptionKey();
|
||||
}
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
inject.setStaticField(Ofy.class, "clock", clock);
|
||||
action = new RdeStagingAction();
|
||||
action.clock = clock;
|
||||
action.mrRunner = makeDefaultRunner();
|
||||
action.lenient = false;
|
||||
action.reducerFactory = new RdeStagingReducer.Factory();
|
||||
action.reducerFactory.cloudTasksUtils = cloudTasksHelper.getTestCloudTasksUtils();
|
||||
action.reducerFactory.lockHandler = new FakeLockHandler(true);
|
||||
action.reducerFactory.bucket = "rde-bucket";
|
||||
action.reducerFactory.lockTimeout = Duration.standardHours(1);
|
||||
action.reducerFactory.stagingKeyBytes = PgpHelper.convertPublicKeyToBytes(encryptKey);
|
||||
action.pendingDepositChecker = new PendingDepositChecker();
|
||||
action.pendingDepositChecker.brdaDayOfWeek = DateTimeConstants.TUESDAY;
|
||||
action.pendingDepositChecker.brdaInterval = Duration.standardDays(7);
|
||||
action.pendingDepositChecker.clock = clock;
|
||||
action.pendingDepositChecker.rdeInterval = Duration.standardDays(1);
|
||||
action.gcsUtils = gcsUtils;
|
||||
action.response = response;
|
||||
action.transactionCooldown = Duration.ZERO;
|
||||
action.directory = Optional.empty();
|
||||
action.modeStrings = ImmutableSet.of();
|
||||
action.tlds = ImmutableSet.of();
|
||||
action.watermarks = ImmutableSet.of();
|
||||
action.revision = Optional.empty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_modeInNonManualMode_throwsException() {
|
||||
createTldWithEscrowEnabled("lol");
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.modeStrings = ImmutableSet.of("full");
|
||||
assertThrows(BadRequestException.class, action::run);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_tldInNonManualMode_throwsException() {
|
||||
createTldWithEscrowEnabled("lol");
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.tlds = ImmutableSet.of("tld");
|
||||
assertThrows(BadRequestException.class, action::run);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_watermarkInNonManualMode_throwsException() {
|
||||
createTldWithEscrowEnabled("lol");
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.watermarks = ImmutableSet.of(clock.nowUtc());
|
||||
assertThrows(BadRequestException.class, action::run);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_revisionInNonManualMode_throwsException() {
|
||||
createTldWithEscrowEnabled("lol");
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.revision = Optional.of(42);
|
||||
assertThrows(BadRequestException.class, action::run);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_noTlds_returns204() {
|
||||
action.run();
|
||||
assertThat(response.getStatus()).isEqualTo(204);
|
||||
assertNoTasksEnqueued("mapreduce");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_tldWithoutEscrowEnabled_returns204() {
|
||||
createTld("lol");
|
||||
persistResource(Registry.get("lol").asBuilder().setEscrowEnabled(false).build());
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
assertThat(response.getStatus()).isEqualTo(204);
|
||||
assertNoTasksEnqueued("mapreduce");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_tldWithEscrowEnabled_runsMapReduce() {
|
||||
createTldWithEscrowEnabled("lol");
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
assertThat(response.getStatus()).isEqualTo(200);
|
||||
assertThat(response.getPayload()).contains("/_ah/pipeline/status.html?root=");
|
||||
assertAtLeastOneTaskIsEnqueued("mapreduce");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRun_withinTransactionCooldown_getsExcludedAndReturns204() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01T00:04:59Z"));
action.transactionCooldown = Duration.standardMinutes(5);
action.run();
assertThat(response.getStatus()).isEqualTo(204);
assertNoTasksEnqueued("mapreduce");
}

@Test
void testRun_afterTransactionCooldown_runsMapReduce() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01T00:05:00Z"));
action.transactionCooldown = Duration.standardMinutes(5);
action.run();
assertAtLeastOneTaskIsEnqueued("mapreduce");
}

@Test
void testManualRun_emptyMode_throwsException() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01TZ"));
action.manual = true;
action.directory = Optional.of("test/");
action.modeStrings = ImmutableSet.of();
action.tlds = ImmutableSet.of("lol");
action.watermarks = ImmutableSet.of(clock.nowUtc());
assertThrows(BadRequestException.class, action::run);
}

@Test
void testManualRun_invalidMode_throwsException() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01TZ"));
action.manual = true;
action.directory = Optional.of("test/");
action.modeStrings = ImmutableSet.of("full", "thing");
action.tlds = ImmutableSet.of("lol");
action.watermarks = ImmutableSet.of(clock.nowUtc());
assertThrows(BadRequestException.class, action::run);
}

@Test
void testManualRun_emptyTld_throwsException() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01TZ"));
action.manual = true;
action.directory = Optional.of("test/");
action.modeStrings = ImmutableSet.of("full");
action.tlds = ImmutableSet.of();
action.watermarks = ImmutableSet.of(clock.nowUtc());
assertThrows(BadRequestException.class, action::run);
}

@Test
void testManualRun_emptyWatermark_throwsException() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01TZ"));
action.manual = true;
action.directory = Optional.of("test/");
action.modeStrings = ImmutableSet.of("full");
action.tlds = ImmutableSet.of("lol");
action.watermarks = ImmutableSet.of();
assertThrows(BadRequestException.class, action::run);
}

@Test
void testManualRun_nonDayStartWatermark_throwsException() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01TZ"));
action.manual = true;
action.directory = Optional.of("test/");
action.modeStrings = ImmutableSet.of("full");
action.tlds = ImmutableSet.of("lol");
action.watermarks = ImmutableSet.of(DateTime.parse("2001-01-01T01:36:45Z"));
assertThrows(BadRequestException.class, action::run);
}

@Test
void testManualRun_invalidRevision_throwsException() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01TZ"));
action.manual = true;
action.directory = Optional.of("test/");
action.modeStrings = ImmutableSet.of("full");
action.tlds = ImmutableSet.of("lol");
action.watermarks = ImmutableSet.of(DateTime.parse("2001-01-01T00:00:00Z"));
action.revision = Optional.of(-1);
assertThrows(BadRequestException.class, action::run);
}

@Test
void testManualRun_validParameters_runsMapReduce() {
createTldWithEscrowEnabled("lol");
clock.setTo(DateTime.parse("2000-01-01TZ"));
action.manual = true;
action.directory = Optional.of("test/");
action.modeStrings = ImmutableSet.of("full");
action.tlds = ImmutableSet.of("lol");
action.watermarks = ImmutableSet.of(DateTime.parse("2001-01-01TZ"));
action.run();
assertThat(response.getStatus()).isEqualTo(200);
assertThat(response.getPayload()).contains("_ah/pipeline/status.html?root=");
assertAtLeastOneTaskIsEnqueued("mapreduce");
}

@Test
void testMapReduce_bunchOfResources_headerHasCorrectCounts() throws Exception {
clock.setTo(DateTime.parse("1999-12-31TZ"));
createTldWithEscrowEnabled("lol");
makeDomainBase(clock, "lol");

clock.setTo(DateTime.parse("2000-01-01TZ"));
action.run();
executeTasksUntilEmpty("mapreduce", clock);

XjcRdeDeposit deposit =
unmarshal(
XjcRdeDeposit.class, Ghostryde.decode(gcsUtils.readBytesFrom(XML_FILE), decryptKey));
XjcRdeHeader header = extractAndRemoveContentWithType(XjcRdeHeader.class, deposit);

assertThat(header.getTld()).isEqualTo("lol");
assertThat(mapifyCounts(header))
.containsExactly(
RdeResourceType.CONTACT.getUri(),
3L,
RdeResourceType.DOMAIN.getUri(),
1L,
RdeResourceType.HOST.getUri(),
2L,
RdeResourceType.REGISTRAR.getUri(),
2L,
RdeResourceType.IDN.getUri(),
(long) IdnTableEnum.values().length);
}

@Test
|
||||
void testMapReduce_validHostResources_getPutInDeposit() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeHostResource(clock, "ns1.cat.lol", "feed::a:bee");
|
||||
makeHostResource(clock, "ns2.cat.lol", "3.1.33.7");
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
XjcRdeDeposit deposit =
|
||||
unmarshal(
|
||||
XjcRdeDeposit.class, Ghostryde.decode(gcsUtils.readBytesFrom(XML_FILE), decryptKey));
|
||||
assertThat(deposit.getType()).isEqualTo(XjcRdeDepositTypeType.FULL);
|
||||
assertThat(deposit.getId()).isEqualTo(RdeUtil.timestampToId(DateTime.parse("2000-01-01TZ")));
|
||||
assertThat(deposit.getWatermark()).isEqualTo(DateTime.parse("2000-01-01TZ"));
|
||||
assertThat(deposit.getResend()).isEqualTo(0);
|
||||
|
||||
XjcRdeHost host1 = extractAndRemoveContentWithType(XjcRdeHost.class, deposit);
|
||||
XjcRdeHost host2 = extractAndRemoveContentWithType(XjcRdeHost.class, deposit);
|
||||
XjcRdeHeader header = extractAndRemoveContentWithType(XjcRdeHeader.class, deposit);
|
||||
|
||||
assertThat(asList(host1.getName(), host2.getName()))
|
||||
.containsExactly("ns1.cat.lol", "ns2.cat.lol");
|
||||
assertThat(asList(host1.getAddrs().get(0).getValue(), host2.getAddrs().get(0).getValue()))
|
||||
.containsExactly("feed::a:bee", "3.1.33.7");
|
||||
|
||||
assertThat(header.getTld()).isEqualTo("lol");
|
||||
assertThat(mapifyCounts(header))
|
||||
.containsExactly(
|
||||
RdeResourceType.CONTACT.getUri(),
|
||||
0L,
|
||||
RdeResourceType.DOMAIN.getUri(),
|
||||
0L,
|
||||
RdeResourceType.HOST.getUri(),
|
||||
2L,
|
||||
RdeResourceType.REGISTRAR.getUri(),
|
||||
2L,
|
||||
RdeResourceType.IDN.getUri(),
|
||||
(long) IdnTableEnum.values().length);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_defaultTestFixtureRegistrars_getPutInDeposit() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeHostResource(clock, "ns1.cat.lol", "feed::a:bee");
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
XjcRdeDeposit deposit =
|
||||
unmarshal(
|
||||
XjcRdeDeposit.class, Ghostryde.decode(gcsUtils.readBytesFrom(XML_FILE), decryptKey));
|
||||
XjcRdeRegistrar registrar1 = extractAndRemoveContentWithType(XjcRdeRegistrar.class, deposit);
|
||||
XjcRdeRegistrar registrar2 = extractAndRemoveContentWithType(XjcRdeRegistrar.class, deposit);
|
||||
XjcRdeHeader header = extractAndRemoveContentWithType(XjcRdeHeader.class, deposit);
|
||||
|
||||
assertThat(asList(registrar1.getName(), registrar2.getName()))
|
||||
.containsExactly("New Registrar", "The Registrar");
|
||||
assertThat(mapifyCounts(header)).containsEntry(RdeResourceType.REGISTRAR.getUri(), 2L);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_sameDayRdeDeposit_advancesCursorToTomorrow() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeDomainBase(clock, "lol");
|
||||
setCursor(Registry.get("lol"), RDE_STAGING, DateTime.parse("2000-01-01TZ"));
|
||||
setCursor(Registry.get("lol"), BRDA, DateTime.parse("2000-01-04TZ"));
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ")); // Saturday
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(RDE_STAGING, Registry.get("lol")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("2000-01-02TZ"));
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(BRDA, Registry.get("lol")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("2000-01-04TZ"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_onBrdaDay_advancesBothCursors() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeDomainBase(clock, "lol");
|
||||
setCursor(Registry.get("lol"), RDE_STAGING, DateTime.parse("2000-01-04TZ"));
|
||||
setCursor(Registry.get("lol"), BRDA, DateTime.parse("2000-01-04TZ"));
|
||||
clock.setTo(DateTime.parse("2000-01-04TZ")); // Tuesday
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(RDE_STAGING, Registry.get("lol")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("2000-01-05TZ"));
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(BRDA, Registry.get("lol")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("2000-01-11TZ"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_onBrdaDay_enqueuesBothTasks() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeDomainBase(clock, "lol");
|
||||
setCursor(Registry.get("lol"), RDE_STAGING, DateTime.parse("2000-01-04TZ"));
|
||||
setCursor(Registry.get("lol"), BRDA, DateTime.parse("2000-01-04TZ"));
|
||||
clock.setTo(DateTime.parse("2000-01-04TZ")); // Tuesday
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
// TODO(b/217773051): duplicate tasks are possible though unlikely. Consider if below calls are
|
||||
// appropriate since they don't allow duplicates.
|
||||
cloudTasksHelper.assertTasksEnqueued(
|
||||
"rde-upload",
|
||||
new TaskMatcher().url(RdeUploadAction.PATH).param(RequestParameters.PARAM_TLD, "lol"));
|
||||
cloudTasksHelper.assertTasksEnqueued(
|
||||
"brda",
|
||||
new TaskMatcher()
|
||||
.url(BrdaCopyAction.PATH)
|
||||
.param(RequestParameters.PARAM_TLD, "lol")
|
||||
.param(RdeModule.PARAM_WATERMARK, "2000-01-04T00:00:00.000Z"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_noEppResourcesAndWayInPast_depositsRegistrarsOnly() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("fop");
|
||||
setCursor(Registry.get("fop"), RDE_STAGING, DateTime.parse("1971-01-01TZ"));
|
||||
setCursor(Registry.get("fop"), BRDA, DateTime.parse("1971-01-05TZ"));
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
for (BlobId filename :
|
||||
asList(
|
||||
BlobId.of("rde-bucket", "fop_1971-01-01_full_S1_R0.xml.ghostryde"),
|
||||
BlobId.of("rde-bucket", "fop_1971-01-05_thin_S1_R0.xml.ghostryde"))) {
|
||||
XjcRdeDeposit deposit =
|
||||
unmarshal(
|
||||
XjcRdeDeposit.class, Ghostryde.decode(gcsUtils.readBytesFrom(filename), decryptKey));
|
||||
XjcRdeRegistrar registrar1 = extractAndRemoveContentWithType(XjcRdeRegistrar.class, deposit);
|
||||
XjcRdeRegistrar registrar2 = extractAndRemoveContentWithType(XjcRdeRegistrar.class, deposit);
|
||||
XjcRdeHeader header = extractAndRemoveContentWithType(XjcRdeHeader.class, deposit);
|
||||
|
||||
assertThat(asList(registrar1.getName(), registrar2.getName()))
|
||||
.containsExactly("New Registrar", "The Registrar");
|
||||
assertThat(mapifyCounts(header)).containsEntry(RdeResourceType.REGISTRAR.getUri(), 2L);
|
||||
}
|
||||
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(RDE_STAGING, Registry.get("fop")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("1971-01-02TZ"));
|
||||
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(BRDA, Registry.get("fop")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("1971-01-12TZ"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_idnTables_goInDeposit() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("fop");
|
||||
makeDomainBase(clock, "fop");
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
BlobId filename = BlobId.of("rde-bucket", "fop_2000-01-01_full_S1_R0.xml.ghostryde");
|
||||
XjcRdeDeposit deposit =
|
||||
unmarshal(
|
||||
XjcRdeDeposit.class, Ghostryde.decode(gcsUtils.readBytesFrom(filename), decryptKey));
|
||||
XjcRdeDomain domain = extractAndRemoveContentWithType(XjcRdeDomain.class, deposit);
|
||||
XjcRdeIdn firstIdn = extractAndRemoveContentWithType(XjcRdeIdn.class, deposit);
|
||||
XjcRdeHeader header = extractAndRemoveContentWithType(XjcRdeHeader.class, deposit);
|
||||
|
||||
assertThat(domain.getIdnTableId()).isEqualTo("extended_latin");
|
||||
assertThat(firstIdn.getId()).isEqualTo("extended_latin");
|
||||
assertThat(firstIdn.getUrl()).isEqualTo(EXTENDED_LATIN.getTable().getUrl().toString());
|
||||
assertThat(firstIdn.getUrlPolicy()).isEqualTo(EXTENDED_LATIN.getTable().getPolicy().toString());
|
||||
assertThat(mapifyCounts(header))
|
||||
.containsEntry(RdeResourceType.IDN.getUri(), (long) IdnTableEnum.values().length);
|
||||
}
|
||||
|
||||
@Test
void testMapReduce_withDomain_producesExpectedXml() throws Exception {
clock.setTo(DateTime.parse("1999-12-31TZ"));
createTldWithEscrowEnabled("lol");
makeDomainBase(clock, "lol");

clock.setTo(DateTime.parse("2000-01-01TZ"));
action.run();
executeTasksUntilEmpty("mapreduce", clock);

XmlTestUtils.assertXmlEquals(
loadFile(getClass(), "testMapReduce_withDomain_producesExpectedXml.xml"),
readXml("lol_2000-01-01_full_S1_R0.xml.ghostryde"),
"deposit.contents.registrar.crDate",
"deposit.contents.registrar.upDate");
}

@Test
void testMapReduce_withDomain_producesCorrectLengthFile() throws Exception {
clock.setTo(DateTime.parse("1999-12-31TZ"));
createTldWithEscrowEnabled("lol");
makeDomainBase(clock, "lol");

clock.setTo(DateTime.parse("2000-01-01TZ"));
action.run();
executeTasksUntilEmpty("mapreduce", clock);

byte[] deposit = Ghostryde.decode(gcsUtils.readBytesFrom(XML_FILE), decryptKey);
assertThat(Integer.parseInt(new String(gcsUtils.readBytesFrom(LENGTH_FILE), UTF_8)))
.isEqualTo(deposit.length);
}

@Test
|
||||
void testMapReduce_withDomain_producesReportXml() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeDomainBase(clock, "lol");
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
XmlTestUtils.assertXmlEquals(
|
||||
loadFile(getClass(), "testMapReduce_withDomain_producesReportXml.xml"),
|
||||
readXml("lol_2000-01-01_full_S1_R0-report.xml.ghostryde"),
|
||||
"deposit.contents.registrar.crDate",
|
||||
"deposit.contents.registrar.upDate");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_twoDomainsDifferentTlds_isolatesDomains() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("boggle");
|
||||
makeDomainBase(clock, "boggle");
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeDomainBase(clock, "lol");
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
clock.setAutoIncrementByOneMilli();
|
||||
action.run();
|
||||
clock.disableAutoIncrement();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
String boggleDeposit = readXml("boggle_2000-01-01_full_S1_R0.xml.ghostryde");
|
||||
assertThat(boggleDeposit).contains("love.boggle");
|
||||
assertThat(boggleDeposit).doesNotContain("love.lol");
|
||||
|
||||
String lolDeposit = readXml("lol_2000-01-01_full_S1_R0.xml.ghostryde");
|
||||
assertThat(lolDeposit).contains("love.lol");
|
||||
assertThat(lolDeposit).doesNotContain("love.boggle");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_twoHostsDifferentTlds_includedInBothTldDeposits() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("fop");
|
||||
makeHostResource(clock, "ns1.dein.fop", "a:fed::cafe");
|
||||
createTldWithEscrowEnabled("lol");
|
||||
makeHostResource(clock, "ns1.kuss.lol", "face::feed");
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
clock.setAutoIncrementByOneMilli();
|
||||
action.run();
|
||||
clock.disableAutoIncrement();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
String fopDeposit = readXml("fop_2000-01-01_full_S1_R0.xml.ghostryde");
|
||||
assertThat(fopDeposit).contains("ns1.dein.fop");
|
||||
assertThat(fopDeposit).contains("ns1.kuss.lol");
|
||||
|
||||
String lolDeposit = readXml("lol_2000-01-01_full_S1_R0.xml.ghostryde");
|
||||
assertThat(lolDeposit).contains("ns1.dein.fop");
|
||||
assertThat(lolDeposit).contains("ns1.kuss.lol");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_rewindCursor_resendsDepositAtHigherRevision() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("fop");
|
||||
makeHostResource(clock, "ns1.dein.fop", "a:fed::cafe");
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-01TZ"));
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
XjcRdeDeposit deposit = unmarshal(
|
||||
XjcRdeDeposit.class,
|
||||
readXml("fop_2000-01-01_full_S1_R0.xml.ghostryde").getBytes(UTF_8));
|
||||
assertThat(deposit.getResend()).isEqualTo(0);
|
||||
|
||||
setCursor(Registry.get("fop"), RDE_STAGING, DateTime.parse("2000-01-01TZ"));
|
||||
action.response = new FakeResponse();
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
deposit = unmarshal(
|
||||
XjcRdeDeposit.class, readXml("fop_2000-01-01_full_S1_R1.xml.ghostryde").getBytes(UTF_8));
|
||||
assertThat(deposit.getResend()).isEqualTo(1);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_brdaDeposit_doesntIncludeHostsOrContacts() throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
createTldWithEscrowEnabled("xn--q9jyb4c");
|
||||
makeHostResource(clock, "ns1.bofh.みんな", "dead:fed::cafe");
|
||||
makeContactResource(clock, "123-IRL", "raven", "edgar@allen.みんな");
|
||||
setCursor(Registry.get("xn--q9jyb4c"), RDE_STAGING, DateTime.parse("2000-01-04TZ"));
|
||||
setCursor(Registry.get("xn--q9jyb4c"), BRDA, DateTime.parse("2000-01-04TZ"));
|
||||
|
||||
clock.setTo(DateTime.parse("2000-01-04TZ")); // Tuesday
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
String rdeDeposit = readXml("xn--q9jyb4c_2000-01-04_full_S1_R0.xml.ghostryde");
|
||||
assertThat(rdeDeposit).contains("<rdeHost:name>ns1.bofh.xn--q9jyb4c");
|
||||
assertThat(rdeDeposit).contains("<rdeContact:email>edgar@allen.みんな");
|
||||
|
||||
String brdaDeposit = readXml("xn--q9jyb4c_2000-01-04_thin_S1_R0.xml.ghostryde");
|
||||
assertThat(brdaDeposit).doesNotContain("<rdeHost:name>ns1.bofh.xn--q9jyb4c");
|
||||
assertThat(brdaDeposit).doesNotContain("<rdeContact:email>edgar@allen.みんな");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_catchUpCursor_doesPointInTime() throws Exception {
|
||||
// Do nothing on the first day.
|
||||
clock.setTo(DateTime.parse("1984-12-17T12:00Z"));
|
||||
createTldWithEscrowEnabled("lol");
|
||||
setCursor(Registry.get("lol"), RDE_STAGING, DateTime.parse("1984-12-18TZ"));
|
||||
|
||||
// Create the host resource on the second day.
|
||||
clock.setTo(DateTime.parse("1984-12-18T12:00Z"));
|
||||
HostResource ns1 = makeHostResource(clock, "ns1.justine.lol", "feed::a:bee");
|
||||
|
||||
// Modify it on the third day.
|
||||
clock.setTo(DateTime.parse("1984-12-19T12:00Z"));
|
||||
persistResourceWithCommitLog(
|
||||
ns1.asBuilder()
|
||||
.setInetAddresses(ImmutableSet.of(InetAddresses.forString("dead:beef::cafe")))
|
||||
.build());
|
||||
|
||||
// It's now the future. Let's catch up that cursor.
|
||||
clock.setTo(DateTime.parse("1990-01-01TZ"));
|
||||
|
||||
// First mapreduce shouldn't emit host because it didn't exist.
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
String firstDeposit = readXml("lol_1984-12-18_full_S1_R0.xml.ghostryde");
|
||||
assertThat(firstDeposit).doesNotContain("ns1.justine.lol");
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(RDE_STAGING, Registry.get("lol")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("1984-12-19TZ"));
|
||||
|
||||
// Second mapreduce should emit the old version of host.
|
||||
action.response = new FakeResponse();
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
String secondDeposit = readXml("lol_1984-12-19_full_S1_R0.xml.ghostryde");
|
||||
assertThat(secondDeposit).contains("ns1.justine.lol");
|
||||
assertThat(secondDeposit).contains("feed::a:bee");
|
||||
assertThat(secondDeposit).doesNotContain("dead:beef::cafe");
|
||||
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(RDE_STAGING, Registry.get("lol")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("1984-12-20TZ"));
|
||||
|
||||
// Third mapreduce emits current version of host.
|
||||
action.response = new FakeResponse();
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
String thirdDeposit = readXml("lol_1984-12-20_full_S1_R0.xml.ghostryde");
|
||||
assertThat(thirdDeposit).contains("ns1.justine.lol");
|
||||
assertThat(thirdDeposit).doesNotContain("feed::a:bee");
|
||||
assertThat(thirdDeposit).contains("dead:beef::cafe");
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(RDE_STAGING, Registry.get("lol")))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("1984-12-21TZ"));
|
||||
}
|
||||
|
||||
private void doManualModeMapReduceTest(int revision, ImmutableSet<String> tlds) throws Exception {
|
||||
clock.setTo(DateTime.parse("1999-12-31TZ"));
|
||||
for (String tld : tlds) {
|
||||
createTldWithEscrowEnabled(tld);
|
||||
makeDomainBase(clock, tld);
|
||||
setCursor(Registry.get(tld), RDE_STAGING, DateTime.parse("1999-01-01TZ"));
|
||||
setCursor(Registry.get(tld), BRDA, DateTime.parse("2001-01-01TZ"));
|
||||
}
|
||||
|
||||
action.manual = true;
|
||||
action.directory = Optional.of("test/");
|
||||
action.modeStrings = ImmutableSet.of("full", "thin");
|
||||
action.tlds = tlds;
|
||||
action.watermarks =
|
||||
ImmutableSet.of(DateTime.parse("2000-01-01TZ"), DateTime.parse("2000-01-02TZ"));
|
||||
action.revision = Optional.of(revision);
|
||||
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce", clock);
|
||||
|
||||
ImmutableList<String> filenames = gcsUtils.listFolderObjects("rde-bucket", "manual/test/");
|
||||
for (String tld : tlds) {
|
||||
assertThat(filenames)
|
||||
.containsAtLeast(
|
||||
tld + "_2000-01-01_full_S1_R" + revision + "-report.xml.ghostryde",
|
||||
tld + "_2000-01-01_full_S1_R" + revision + ".xml.ghostryde",
|
||||
tld + "_2000-01-01_full_S1_R" + revision + ".xml.length",
|
||||
tld + "_2000-01-01_thin_S1_R" + revision + ".xml.ghostryde",
|
||||
tld + "_2000-01-01_thin_S1_R" + revision + ".xml.length",
|
||||
tld + "_2000-01-02_full_S1_R" + revision + "-report.xml.ghostryde",
|
||||
tld + "_2000-01-02_full_S1_R" + revision + ".xml.ghostryde",
|
||||
tld + "_2000-01-02_full_S1_R" + revision + ".xml.length",
|
||||
tld + "_2000-01-02_thin_S1_R" + revision + ".xml.ghostryde",
|
||||
tld + "_2000-01-02_thin_S1_R" + revision + ".xml.length");
|
||||
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(RDE_STAGING, Registry.get(tld)))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("1999-01-01TZ"));
|
||||
assertThat(
|
||||
auditedOfy()
|
||||
.load()
|
||||
.key(Cursor.createKey(BRDA, Registry.get(tld)))
|
||||
.now()
|
||||
.getCursorTime())
|
||||
.isEqualTo(DateTime.parse("2001-01-01TZ"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMapReduce_manualMode_generatesCorrectDepositsWithoutAdvancingCursors() throws Exception {
|
||||
doManualModeMapReduceTest(0, ImmutableSet.of("lol"));
|
||||
XmlTestUtils.assertXmlEquals(
|
||||
loadFile(getClass(), "testMapReduce_withDomain_producesExpectedXml.xml"),
|
||||
readXml("manual/test/lol_2000-01-01_full_S1_R0.xml.ghostryde"),
|
||||
"deposit.contents.registrar.crDate",
|
||||
"deposit.contents.registrar.upDate");
|
||||
XmlTestUtils.assertXmlEquals(
|
||||
loadFile(getClass(), "testMapReduce_withDomain_producesReportXml.xml"),
|
||||
readXml("manual/test/lol_2000-01-01_full_S1_R0-report.xml.ghostryde"),
|
||||
"deposit.contents.registrar.crDate",
|
||||
"deposit.contents.registrar.upDate");
|
||||
}
|
||||
|
||||
@Test
void testMapReduce_manualMode_nonZeroRevisionAndMultipleTlds() throws Exception {
doManualModeMapReduceTest(42, ImmutableSet.of("lol", "slug"));
}

private String readXml(String objectName) throws IOException, PGPException {
BlobId file = BlobId.of("rde-bucket", objectName);
return new String(Ghostryde.decode(gcsUtils.readBytesFrom(file), decryptKey), UTF_8);
}

private <T extends XjcRdeContentType>
T extractAndRemoveContentWithType(Class<T> type, XjcRdeDeposit deposit) {
for (JAXBElement<? extends XjcRdeContentType> content : deposit.getContents().getContents()) {
XjcRdeContentType piece = content.getValue();
if (type.isInstance(piece) && !alreadyExtracted.contains(piece)) {
alreadyExtracted.add(piece);
return type.cast(piece);
}
}
throw new AssertionError("Expected deposit to contain another " + type.getSimpleName());
}

private static void createTldWithEscrowEnabled(final String tld) {
createTld(tld);
persistResource(Registry.get(tld).asBuilder().setEscrowEnabled(true).build());
}

private static ImmutableMap<String, Long> mapifyCounts(XjcRdeHeader header) {
ImmutableMap.Builder<String, Long> builder = new ImmutableMap.Builder<>();
for (XjcRdeHeaderCount count : header.getCounts()) {
builder.put(count.getUri(), count.getValue());
}
return builder.build();
}

private void setCursor(
final Registry registry, final CursorType cursorType, final DateTime value) {
clock.advanceOneMilli();
tm().transact(() -> tm().put(Cursor.create(cursorType, value, registry)));
}

public static <T> T unmarshal(Class<T> clazz, byte[] xml) throws XmlException {
return XjcXmlTransformer.unmarshal(clazz, new ByteArrayInputStream(xml));
}
}
@@ -45,7 +45,7 @@ import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link RdeStagingAction} in Cloud SQL. */
@DualDatabaseTest
public class RdeStagingActionCloudSqlTest extends BeamActionTestBase {
public class RdeStagingActionTest extends BeamActionTestBase {

private final FakeClock clock = new FakeClock();
private final GcsUtils gcsUtils = new GcsUtils(LocalStorageHelper.getOptions());
@@ -1,109 +0,0 @@
|
||||
// Copyright 2020 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.rde;
|
||||
|
||||
import static com.google.common.base.Preconditions.checkState;
|
||||
import static com.google.common.truth.Truth8.assertThat;
|
||||
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
|
||||
import static google.registry.testing.DatabaseHelper.persistNewRegistrar;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.verify;
|
||||
|
||||
import com.google.appengine.tools.mapreduce.MapperContext;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableSetMultimap;
|
||||
import google.registry.model.registrar.Registrar;
|
||||
import google.registry.model.registrar.Registrar.State;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import google.registry.xml.ValidationMode;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
/** Unit tests for {@link RdeStagingMapper}. */
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class RdeStagingMapperTest {
|
||||
|
||||
private static final Pattern REGISTRAR_NAME_PATTERN =
|
||||
Pattern.compile("<rdeRegistrar:name>(.*)</rdeRegistrar:name>");
|
||||
|
||||
@Mock PendingDeposit pendingDeposit;
|
||||
|
||||
@Mock MapperContext<PendingDeposit, DepositFragment> context;
|
||||
|
||||
private ArgumentCaptor<DepositFragment> depositFragmentCaptor =
|
||||
ArgumentCaptor.forClass(DepositFragment.class);
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
AppEngineExtension appEngineExtension =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
private RdeStagingMapper rdeStagingMapper;
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
// Two real registrars have been created by AppEngineExtension, named "New Registrar" and "The
|
||||
// Registrar". Create one included registrar (external_monitoring) and two excluded ones.
|
||||
Registrar monitoringRegistrar =
|
||||
persistNewRegistrar("monitoring", "monitoring", Registrar.Type.MONITORING, null);
|
||||
Registrar testRegistrar = persistNewRegistrar("test", "test", Registrar.Type.TEST, null);
|
||||
Registrar externalMonitoringRegistrar =
|
||||
persistNewRegistrar(
|
||||
"externalmonitor", "external_monitoring", Registrar.Type.EXTERNAL_MONITORING, 9997L);
|
||||
|
||||
// Set Registrar states which are required for reporting.
|
||||
tm().transact(
|
||||
() ->
|
||||
tm().putAll(
|
||||
ImmutableList.of(
|
||||
externalMonitoringRegistrar.asBuilder().setState(State.ACTIVE).build(),
|
||||
testRegistrar.asBuilder().setState(State.ACTIVE).build(),
|
||||
monitoringRegistrar.asBuilder().setState(State.ACTIVE).build())));
|
||||
|
||||
rdeStagingMapper =
|
||||
new RdeStagingMapper(ValidationMode.STRICT, ImmutableSetMultimap.of("1", pendingDeposit));
|
||||
rdeStagingMapper.setContext(context);
|
||||
}
|
||||
|
||||
@Test
void registrars_ignoreMonitoringAndTestTypes() {
rdeStagingMapper.map(null);
verify(context, Mockito.times(3))
.emit(any(PendingDeposit.class), depositFragmentCaptor.capture());
assertThat(
depositFragmentCaptor.getAllValues().stream()
.map(RdeStagingMapperTest::findRegistrarName))
.containsExactly("New Registrar", "The Registrar", "external_monitoring");
}

private static String findRegistrarName(DepositFragment fragment) {
Matcher matcher = REGISTRAR_NAME_PATTERN.matcher(fragment.xml());
checkState(matcher.find(), "Missing registrarName in xml.");
return matcher.group(1);
}
}
@@ -1,254 +0,0 @@
|
||||
// Copyright 2020 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.rde;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.model.rde.RdeMode.FULL;
|
||||
import static google.registry.model.rde.RdeMode.THIN;
|
||||
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.util.ResourceUtils.readResourceUtf8;
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.google.appengine.tools.mapreduce.ReducerInput;
|
||||
import com.google.cloud.storage.BlobId;
|
||||
import com.google.cloud.storage.StorageException;
|
||||
import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import google.registry.beam.rde.RdePipelineTest;
|
||||
import google.registry.gcs.GcsUtils;
|
||||
import google.registry.keyring.api.PgpHelper;
|
||||
import google.registry.model.common.Cursor;
|
||||
import google.registry.model.common.Cursor.CursorType;
|
||||
import google.registry.model.rde.RdeMode;
|
||||
import google.registry.model.rde.RdeRevision;
|
||||
import google.registry.model.tld.Registry;
|
||||
import google.registry.request.RequestParameters;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.CloudTasksHelper;
|
||||
import google.registry.testing.CloudTasksHelper.TaskMatcher;
|
||||
import google.registry.testing.FakeKeyringModule;
|
||||
import google.registry.testing.FakeLockHandler;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import google.registry.xml.ValidationMode;
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import org.bouncycastle.openpgp.PGPException;
|
||||
import org.bouncycastle.openpgp.PGPPrivateKey;
|
||||
import org.bouncycastle.openpgp.PGPPublicKey;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.Duration;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link RdeStagingReducer}. */
|
||||
class RdeStagingReducerTest {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
AppEngineExtension appEngineExtension =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().withTaskQueue().build();
|
||||
|
||||
private static final String GCS_BUCKET = "test-rde-bucket";
|
||||
private static final GcsUtils gcsUtils = new GcsUtils(LocalStorageHelper.getOptions());
|
||||
private static final PGPPrivateKey decryptionKey =
|
||||
new FakeKeyringModule().get().getRdeStagingDecryptionKey();
|
||||
private static final PGPPublicKey encryptionKey =
|
||||
new FakeKeyringModule().get().getRdeStagingEncryptionKey();
|
||||
private static final DateTime now = DateTime.parse("2000-01-01TZ");
|
||||
private final CloudTasksHelper cloudTasksHelper = new CloudTasksHelper();
|
||||
|
||||
private Fragments brdaFragments =
|
||||
new Fragments(
|
||||
ImmutableList.of(
|
||||
DepositFragment.create(RdeResourceType.DOMAIN, "<rdeDomain:domain/>\n", ""),
|
||||
DepositFragment.create(
|
||||
RdeResourceType.REGISTRAR, "<rdeRegistrar:registrar/>\n", "")));
|
||||
|
||||
private Fragments rdeFragments =
|
||||
new Fragments(
|
||||
ImmutableList.of(
|
||||
DepositFragment.create(RdeResourceType.DOMAIN, "<rdeDomain:domain/>\n", ""),
|
||||
DepositFragment.create(RdeResourceType.REGISTRAR, "<rdeRegistrar:registrar/>\n", ""),
|
||||
DepositFragment.create(RdeResourceType.CONTACT, "<rdeContact:contact/>\n", ""),
|
||||
DepositFragment.create(RdeResourceType.HOST, "<rdeHost:host/>\n", "")));
|
||||
|
||||
private PendingDeposit key;
|
||||
|
||||
private RdeStagingReducer reducer =
|
||||
new RdeStagingReducer(
|
||||
cloudTasksHelper.getTestCloudTasksUtils(),
|
||||
new FakeLockHandler(true),
|
||||
GCS_BUCKET,
|
||||
Duration.ZERO,
|
||||
PgpHelper.convertPublicKeyToBytes(encryptionKey),
|
||||
ValidationMode.STRICT,
|
||||
gcsUtils);
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
createTld("soy");
|
||||
tm().transact(
|
||||
() -> {
|
||||
tm().put(Cursor.create(CursorType.BRDA, now, Registry.get("soy")));
|
||||
tm().put(Cursor.create(CursorType.RDE_STAGING, now, Registry.get("soy")));
|
||||
RdeRevision.saveRevision("soy", now, THIN, 0);
|
||||
RdeRevision.saveRevision("soy", now, FULL, 0);
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_BRDA() throws Exception {
|
||||
key = PendingDeposit.create("soy", now, THIN, CursorType.BRDA, Duration.standardDays(1));
|
||||
reducer.reduce(key, brdaFragments);
|
||||
String outputFile = decryptGhostrydeGcsFile("soy_2000-01-01_thin_S1_R1.xml.ghostryde");
|
||||
assertThat(outputFile)
|
||||
.isEqualTo(
|
||||
readResourceUtf8(RdePipelineTest.class, "reducer_brda.xml")
|
||||
.replace("%RESEND%", " resend=\"1\""));
|
||||
compareLength(outputFile, "soy_2000-01-01_thin_S1_R1.xml.length");
|
||||
// BRDA doesn't write a report file.
|
||||
assertThrows(
|
||||
StorageException.class,
|
||||
() ->
|
||||
gcsUtils.readBytesFrom(
|
||||
BlobId.of(GCS_BUCKET, "soy_2000-01-01_thin_S1_R1-report.xml.ghostryde")));
|
||||
assertThat(loadCursorTime(CursorType.BRDA))
|
||||
.isEquivalentAccordingToCompareTo(now.plus(Duration.standardDays(1)));
|
||||
assertThat(loadRevision(THIN)).isEqualTo(1);
|
||||
cloudTasksHelper.assertTasksEnqueued(
|
||||
"brda",
|
||||
new TaskMatcher()
|
||||
.url(BrdaCopyAction.PATH)
|
||||
.param(RequestParameters.PARAM_TLD, "soy")
|
||||
.param(RdeModule.PARAM_WATERMARK, now.toString()));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_BRDA_manual() throws Exception {
|
||||
key = PendingDeposit.createInManualOperation("soy", now, THIN, "", 0);
|
||||
reducer.reduce(key, brdaFragments);
|
||||
String outputFile = decryptGhostrydeGcsFile("manual/soy_2000-01-01_thin_S1_R0.xml.ghostryde");
|
||||
assertThat(outputFile)
|
||||
.isEqualTo(
|
||||
readResourceUtf8(RdePipelineTest.class, "reducer_brda.xml").replace("%RESEND%", ""));
|
||||
compareLength(outputFile, "manual/soy_2000-01-01_thin_S1_R0.xml.length");
|
||||
// BRDA doesn't write a report file.
|
||||
assertThrows(
|
||||
StorageException.class,
|
||||
() ->
|
||||
gcsUtils.readBytesFrom(
|
||||
BlobId.of(GCS_BUCKET, "manual/soy_2000-01-01_thin_S1_R0-report.xml.ghostryde")));
|
||||
// No extra operations in manual mode.
|
||||
assertThat(loadCursorTime(CursorType.BRDA)).isEquivalentAccordingToCompareTo(now);
|
||||
assertThat(loadRevision(THIN)).isEqualTo(0);
|
||||
cloudTasksHelper.assertNoTasksEnqueued("brda");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_RDE() throws Exception {
|
||||
key = PendingDeposit.create("soy", now, FULL, CursorType.RDE_STAGING, Duration.standardDays(1));
|
||||
reducer.reduce(key, rdeFragments);
|
||||
String outputFile = decryptGhostrydeGcsFile("soy_2000-01-01_full_S1_R1.xml.ghostryde");
|
||||
assertThat(outputFile)
|
||||
.isEqualTo(
|
||||
readResourceUtf8(RdePipelineTest.class, "reducer_rde.xml")
|
||||
.replace("%RESEND%", " resend=\"1\""));
|
||||
compareLength(outputFile, "soy_2000-01-01_full_S1_R1.xml.length");
|
||||
assertThat(decryptGhostrydeGcsFile("soy_2000-01-01_full_S1_R1-report.xml.ghostryde"))
|
||||
.isEqualTo(
|
||||
readResourceUtf8(RdePipelineTest.class, "reducer_rde_report.xml")
|
||||
.replace("%RESEND%", "1"));
|
||||
assertThat(loadCursorTime(CursorType.RDE_STAGING))
|
||||
.isEquivalentAccordingToCompareTo(now.plus(Duration.standardDays(1)));
|
||||
assertThat(loadRevision(FULL)).isEqualTo(1);
|
||||
cloudTasksHelper.assertTasksEnqueued(
|
||||
"rde-upload",
|
||||
new TaskMatcher().url(RdeUploadAction.PATH).param(RequestParameters.PARAM_TLD, "soy"));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_RDE_manual() throws Exception {
|
||||
key = PendingDeposit.createInManualOperation("soy", now, FULL, "", 0);
|
||||
reducer.reduce(key, rdeFragments);
|
||||
String outputFile = decryptGhostrydeGcsFile("manual/soy_2000-01-01_full_S1_R0.xml.ghostryde");
|
||||
assertThat(outputFile)
|
||||
.isEqualTo(
|
||||
readResourceUtf8(RdePipelineTest.class, "reducer_rde.xml").replace("%RESEND%", ""));
|
||||
compareLength(outputFile, "manual/soy_2000-01-01_full_S1_R0.xml.length");
|
||||
assertThat(decryptGhostrydeGcsFile("manual/soy_2000-01-01_full_S1_R0-report.xml.ghostryde"))
|
||||
.isEqualTo(
|
||||
readResourceUtf8(RdePipelineTest.class, "reducer_rde_report.xml")
|
||||
.replace("%RESEND%", "0"));
|
||||
// No extra operations in manual mode.
|
||||
assertThat(loadCursorTime(CursorType.RDE_STAGING)).isEquivalentAccordingToCompareTo(now);
|
||||
assertThat(loadRevision(FULL)).isEqualTo(0);
|
||||
cloudTasksHelper.assertNoTasksEnqueued("rde-upload");
|
||||
}
|
||||
|
||||
private static void compareLength(String outputFile, String lengthFilename) throws IOException {
assertThat(String.valueOf(outputFile.getBytes(UTF_8).length))
.isEqualTo(
new String(gcsUtils.readBytesFrom(BlobId.of(GCS_BUCKET, lengthFilename)), UTF_8));
}

private static DateTime loadCursorTime(CursorType type) {
return auditedOfy()
.load()
.key(Cursor.createKey(type, Registry.get("soy")))
.now()
.getCursorTime();
}

private static int loadRevision(RdeMode mode) {
return auditedOfy()
.load()
.type(RdeRevision.class)
.id("soy_2000-01-01_" + mode.getFilenameComponent())
.now()
.getRevision();
}

private static String decryptGhostrydeGcsFile(String filename) throws IOException, PGPException {
return new String(
Ghostryde.decode(gcsUtils.readBytesFrom(BlobId.of(GCS_BUCKET, filename)), decryptionKey),
UTF_8);
}

private static class Fragments extends ReducerInput<DepositFragment> {
private final Iterator<DepositFragment> iterator;

Fragments(Iterable<DepositFragment> iterable) {
this.iterator = iterable.iterator();
}

@Override
public boolean hasNext() {
return iterator.hasNext();
}

@Override
public DepositFragment next() {
return iterator.next();
}
}
}
@@ -27,8 +27,7 @@ import org.junit.runner.RunWith;
GhostrydeGpgIntegrationTest.class,
GhostrydeTest.class,
HostResourceToXjcConverterTest.class,
RdeStagingActionDatastoreTest.class,
RdeStagingActionCloudSqlTest.class,
RdeStagingActionTest.class,
RdeUploadActionTest.class,
RdeReportActionTest.class,
RegistrarToXjcConverterTest.class,

@@ -976,14 +976,14 @@ public class DatabaseHelper {
* {@link ForeignKeyIndex}.
*
* <p><b>Note:</b> Your resource will not be enrolled in a commit log. If you want backups, use
* {@link #persistResourceWithCommitLog(Object)}.
* {@link #persistResourceWithBackup(Object)}.
*/
public static <R extends ImmutableObject> R persistResource(final R resource) {
return persistResource(resource, false);
}

/** Same as {@link #persistResource(Object)} with backups enabled. */
public static <R extends ImmutableObject> R persistResourceWithCommitLog(final R resource) {
public static <R extends ImmutableObject> R persistResourceWithBackup(final R resource) {
return persistResource(resource, true);
}

@@ -1,250 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.testing.mapreduce;
|
||||
|
||||
import static google.registry.config.RegistryConfig.getEppResourceIndexBucketCount;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import com.google.appengine.api.blobstore.dev.LocalBlobstoreService;
|
||||
import com.google.appengine.api.modules.ModulesService;
|
||||
import com.google.appengine.api.taskqueue.dev.LocalTaskQueue;
|
||||
import com.google.appengine.api.taskqueue.dev.QueueStateInfo;
|
||||
import com.google.appengine.api.taskqueue.dev.QueueStateInfo.HeaderWrapper;
|
||||
import com.google.appengine.tools.development.ApiProxyLocal;
|
||||
import com.google.appengine.tools.development.testing.LocalTaskQueueTestConfig;
|
||||
import com.google.appengine.tools.mapreduce.MapReduceServlet;
|
||||
import com.google.appengine.tools.pipeline.impl.servlets.PipelineServlet;
|
||||
import com.google.appengine.tools.pipeline.impl.servlets.TaskHandler;
|
||||
import com.google.apphosting.api.ApiProxy;
|
||||
import com.google.common.base.CharMatcher;
|
||||
import com.google.common.base.Splitter;
|
||||
import com.google.common.flogger.FluentLogger;
|
||||
import google.registry.mapreduce.MapreduceRunner;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.FakeClock;
|
||||
import google.registry.util.AppEngineServiceUtils;
|
||||
import google.registry.util.AppEngineServiceUtilsImpl;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import javax.annotation.Nullable;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.Mockito;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.mockito.junit.jupiter.MockitoSettings;
|
||||
import org.mockito.quality.Strictness;
|
||||
|
||||
/**
* Base test class for mapreduces.
*
* <p>Adapted from EndToEndTestCase with some modifications that allow it to work with Nomulus, most
* notably inside knowledge of our routing paths and our Datastore/Task Queue configurations.
*
* <p>See
* https://github.com/GoogleCloudPlatform/appengine-mapreduce/blob/master/java/src/test/java/com/google/appengine/tools/mapreduce/EndToEndTestCase.java
*
* @param <T> The type of the Action class that implements the mapreduce.
*/
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
public abstract class MapreduceTestCase<T> {

private static final FluentLogger logger = FluentLogger.forEnclosingClass();
|
||||
|
||||
protected T action;
|
||||
|
||||
private final MapReduceServlet mrServlet = new MapReduceServlet();
|
||||
private final PipelineServlet pipelineServlet = new PipelineServlet();
|
||||
private LocalTaskQueue taskQueue;
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder()
|
||||
.withDatastoreAndCloudSql()
|
||||
.withLocalModules()
|
||||
.withTaskQueue()
|
||||
.build();
|
||||
|
||||
private AppEngineServiceUtils appEngineServiceUtils;
|
||||
|
||||
@Mock ModulesService modulesService;
|
||||
|
||||
@BeforeEach
|
||||
public void beforeEachMapreduceTestCase() {
|
||||
taskQueue = LocalTaskQueueTestConfig.getLocalTaskQueue();
|
||||
ApiProxyLocal proxy = (ApiProxyLocal) ApiProxy.getDelegate();
|
||||
// Creating files is not allowed in some test execution environments, so don't.
|
||||
proxy.setProperty(LocalBlobstoreService.NO_STORAGE_PROPERTY, "true");
|
||||
appEngineServiceUtils = new AppEngineServiceUtilsImpl(modulesService);
|
||||
Mockito.when(modulesService.getVersionHostname("backend", null))
|
||||
.thenReturn("version.backend.projectid.appspot.com");
|
||||
}
|
||||
|
||||
protected MapreduceRunner makeDefaultRunner() {
|
||||
return new MapreduceRunner(
|
||||
Optional.of(getEppResourceIndexBucketCount()), Optional.of(1), appEngineServiceUtils);
|
||||
}
|
||||
|
||||
protected List<QueueStateInfo.TaskStateInfo> getTasks(String queueName) {
|
||||
return taskQueue.getQueueStateInfo().get(queueName).getTaskInfo();
|
||||
}
|
||||
|
||||
private void executeTask(String queueName, QueueStateInfo.TaskStateInfo taskStateInfo)
|
||||
throws Exception {
|
||||
logger.atFine().log(
|
||||
"Executing task %s with URL %s", taskStateInfo.getTaskName(), taskStateInfo.getUrl());
|
||||
// Hack to allow for deferred tasks. Exploits knowing how they work.
|
||||
if (taskStateInfo.getUrl().endsWith("__deferred__")) {
|
||||
ObjectInputStream oin =
|
||||
new ObjectInputStream(new ByteArrayInputStream(taskStateInfo.getBodyAsBytes()));
|
||||
Runnable object = (Runnable) oin.readObject();
|
||||
object.run();
|
||||
return;
|
||||
}
|
||||
HttpServletRequest request = mock(HttpServletRequest.class);
|
||||
HttpServletResponse response = mock(HttpServletResponse.class);
|
||||
|
||||
// Strip off routing paths that are handled in web.xml in non-test scenarios.
|
||||
String pathInfo = taskStateInfo.getUrl();
|
||||
if (pathInfo.startsWith("/_dr/mapreduce/")) {
|
||||
pathInfo = pathInfo.replace("/_dr/mapreduce", "");
|
||||
} else if (pathInfo.startsWith("/mapreduce/")) {
|
||||
pathInfo = pathInfo.replace("/mapreduce", "");
|
||||
} else if (pathInfo.startsWith("/")) {
|
||||
pathInfo = pathInfo.replace("/_ah/", "");
|
||||
pathInfo = pathInfo.substring(pathInfo.indexOf('/'));
|
||||
} else {
|
||||
pathInfo = "/" + pathInfo;
|
||||
}
|
||||
when(request.getPathInfo()).thenReturn(pathInfo);
|
||||
when(request.getHeader("X-AppEngine-QueueName")).thenReturn(queueName);
|
||||
when(request.getHeader("X-AppEngine-TaskName")).thenReturn(taskStateInfo.getTaskName());
|
||||
// Pipeline looks at this header but uses the value only for diagnostic messages
|
||||
when(request.getIntHeader(TaskHandler.TASK_RETRY_COUNT_HEADER)).thenReturn(-1);
|
||||
for (HeaderWrapper header : taskStateInfo.getHeaders()) {
|
||||
int value = parseAsQuotedInt(header.getValue());
|
||||
Mockito.when(request.getIntHeader(header.getKey())).thenReturn(value);
|
||||
logger.atFine().log("header: %s=%s", header.getKey(), header.getValue());
|
||||
Mockito.when(request.getHeader(header.getKey())).thenReturn(header.getValue());
|
||||
}
|
||||
|
||||
Map<String, String> parameters = decodeParameters(taskStateInfo.getBody());
|
||||
for (String name : parameters.keySet()) {
|
||||
when(request.getParameter(name)).thenReturn(parameters.get(name));
|
||||
}
|
||||
when(request.getParameterNames()).thenReturn(Collections.enumeration(parameters.keySet()));
|
||||
|
||||
if (taskStateInfo.getMethod().equals("POST")) {
|
||||
if (taskStateInfo.getUrl().startsWith(PipelineServlet.BASE_URL)) {
|
||||
pipelineServlet.doPost(request, response);
|
||||
} else {
|
||||
mrServlet.doPost(request, response);
|
||||
}
|
||||
} else {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
||||
|
||||
private int parseAsQuotedInt(String str) {
|
||||
try {
|
||||
return Integer.parseInt(CharMatcher.is('"').trimFrom(str));
|
||||
} catch (NumberFormatException e) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
/**
* Executes tasks in the mapreduce queue until all are finished.
*
* <p>If you are mocking a clock in your tests, use the
* {@link #executeTasksUntilEmpty(String, FakeClock)} version instead.
*/
protected void executeTasksUntilEmpty(String queueName) throws Exception {
executeTasksUntilEmpty(queueName, null);
}

/**
* Executes mapreduce tasks, incrementing the clock between each task.
*
* <p>Incrementing the clock between tasks is important if tasks have transactions inside the
* mapper or reducer, which don't have access to the fake clock.
*/
protected void executeTasksUntilEmpty(String queueName, @Nullable FakeClock clock)
throws Exception {
executeTasks(queueName, clock, Optional.empty());
}

/**
* Executes mapreduce tasks, incrementing the clock between each task.
*
* <p>Incrementing the clock between tasks is important if tasks have transactions inside the
* mapper or reducer, which don't have access to the fake clock.
*
* <p>The maxTasks parameter determines how many tasks (at most) will be run. If maxTasks is
* absent(), all tasks are run until the queue is empty. If maxTasks is zero, no tasks are run.
*/
private void executeTasks(String queueName, @Nullable FakeClock clock, Optional<Integer> maxTasks)
throws Exception {
for (int numTasksDeleted = 0;
|
||||
!maxTasks.isPresent() || (numTasksDeleted < maxTasks.get());
|
||||
numTasksDeleted++) {
|
||||
auditedOfy().clearSessionCache();
|
||||
// We have to re-acquire task list every time, because local implementation returns a copy.
|
||||
List<QueueStateInfo.TaskStateInfo> taskInfo =
|
||||
taskQueue.getQueueStateInfo().get(queueName).getTaskInfo();
|
||||
if (taskInfo.isEmpty()) {
|
||||
break;
|
||||
}
|
||||
QueueStateInfo.TaskStateInfo taskStateInfo = taskInfo.get(0);
|
||||
taskQueue.deleteTask(queueName, taskStateInfo.getTaskName());
|
||||
executeTask(queueName, taskStateInfo);
|
||||
if (clock != null) {
|
||||
clock.advanceOneMilli();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sadly there's no way to parse a query string with the JDK. This is a good enough approximation.
|
||||
private static Map<String, String> decodeParameters(String requestBody)
|
||||
throws UnsupportedEncodingException {
|
||||
Map<String, String> result = new HashMap<>();
|
||||
|
||||
Iterable<String> params = Splitter.on('&').split(requestBody);
|
||||
for (String param : params) {
|
||||
List<String> pair = Splitter.on('=').splitToList(param);
|
||||
String name = pair.get(0);
|
||||
String value = URLDecoder.decode(pair.get(1), "UTF-8");
|
||||
if (result.containsKey(name)) {
|
||||
throw new IllegalArgumentException("Duplicate parameter: " + requestBody);
|
||||
}
|
||||
result.put(name, value);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
@@ -1,114 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
|
||||
import com.google.common.io.Resources;
|
||||
import google.registry.model.annotations.DeleteAfterMigration;
|
||||
import google.registry.testing.DatastoreEntityExtension;
|
||||
import google.registry.tools.EntityWrapper.Property;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
|
||||
@DeleteAfterMigration
|
||||
public class CompareDbBackupsTest {
|
||||
|
||||
private static final int BASE_ID = 1001;
|
||||
|
||||
// Capture standard output.
|
||||
private final ByteArrayOutputStream stdout = new ByteArrayOutputStream();
|
||||
private PrintStream orgStdout;
|
||||
|
||||
@TempDir Path tmpDir;
|
||||
|
||||
@RegisterExtension
|
||||
public DatastoreEntityExtension datastoreEntityExtension = new DatastoreEntityExtension();
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
orgStdout = System.out;
|
||||
System.setOut(new PrintStream(stdout));
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
void afterEach() {
|
||||
System.setOut(orgStdout);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testLoadBackup() {
|
||||
URL backupRootFolder = Resources.getResource("google/registry/tools/datastore-export");
|
||||
CompareDbBackups.main(new String[] {backupRootFolder.getPath(), backupRootFolder.getPath()});
|
||||
String output = new String(stdout.toByteArray(), UTF_8);
|
||||
assertThat(output).contains("Both sets have the same 41 entities");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testCompareBackups() throws Exception {
|
||||
// Create two directories corresponding to data dumps.
|
||||
Path dump1 = Files.createDirectory(tmpDir.resolve("dump1"));
|
||||
Path dump2 = Files.createDirectory(tmpDir.resolve("dump2"));
|
||||
|
||||
LevelDbFileBuilder builder = new LevelDbFileBuilder(new File(dump1.toFile(), "output-data1"));
|
||||
builder.addEntity(
|
||||
EntityWrapper.from(
|
||||
BASE_ID,
|
||||
Property.create("eeny", 100L),
|
||||
Property.create("meeny", 200L),
|
||||
Property.create("miney", 300L))
|
||||
.getEntity());
|
||||
builder.addEntity(
|
||||
EntityWrapper.from(
|
||||
BASE_ID + 1,
|
||||
Property.create("moxey", 100L),
|
||||
Property.create("minney", 200L),
|
||||
Property.create("motz", 300L))
|
||||
.getEntity());
|
||||
builder.build();
|
||||
|
||||
builder = new LevelDbFileBuilder(new File(dump2.toFile(), "output-data2"));
|
||||
builder.addEntity(
|
||||
EntityWrapper.from(
|
||||
BASE_ID + 1,
|
||||
Property.create("moxey", 100L),
|
||||
Property.create("minney", 200L),
|
||||
Property.create("motz", 300L))
|
||||
.getEntity());
|
||||
builder.addEntity(
|
||||
EntityWrapper.from(
|
||||
BASE_ID + 2,
|
||||
Property.create("blutzy", 100L),
|
||||
Property.create("fishey", 200L),
|
||||
Property.create("strutz", 300L))
|
||||
.getEntity());
|
||||
builder.build();
|
||||
|
||||
CompareDbBackups.main(new String[] {dump1.toString(), dump2.toString()});
|
||||
String output = new String(stdout.toByteArray(), UTF_8);
|
||||
assertThat(output)
|
||||
.containsMatch("(?s)1 records were removed.*eeny.*1 records were added.*blutzy");
|
||||
}
|
||||
}
|
||||
@@ -1,235 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveContact;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveDomain;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveHost;
|
||||
import static google.registry.testing.DatabaseHelper.persistDeletedContact;
|
||||
import static google.registry.testing.DatabaseHelper.persistDeletedDomain;
|
||||
import static google.registry.testing.DatabaseHelper.persistDeletedHost;
|
||||
import static org.joda.time.DateTimeZone.UTC;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
|
||||
import com.beust.jcommander.ParameterException;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link GetResourceByKeyCommand}. */
|
||||
class GetResourceByKeyCommandTest extends CommandTestCase<GetResourceByKeyCommand> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
private DateTime now = DateTime.now(UTC);
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
createTld("tld");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_domain() throws Exception {
|
||||
persistActiveDomain("example.tld");
|
||||
runCommand("agR0ZXN0chULEgpEb21haW5CYXNlIgUyLVRMRAw");
|
||||
assertInStdout("fullyQualifiedDomainName=example.tld");
|
||||
assertInStdout("contact=Key<?>(ContactResource(\"3-ROID\"))");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_domain_expand() throws Exception {
|
||||
persistActiveDomain("example.tld");
|
||||
runCommand("agR0ZXN0chULEgpEb21haW5CYXNlIgUyLVRMRAw", "--expand");
|
||||
assertInStdout("fullyQualifiedDomainName=example.tld");
|
||||
assertInStdout("contactId=contact1234");
|
||||
assertNotInStdout("LiveRef");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_domain_multipleArguments() throws Exception {
|
||||
persistActiveDomain("example.tld");
|
||||
persistActiveDomain("example2.tld");
|
||||
runCommand(
|
||||
"agR0ZXN0chULEgpEb21haW5CYXNlIgUyLVRMRAw", "agR0ZXN0chULEgpEb21haW5CYXNlIgU0LVRMRAw");
|
||||
assertInStdout("fullyQualifiedDomainName=example.tld");
|
||||
assertInStdout("fullyQualifiedDomainName=example2.tld");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_domain_oneDoesNotExist() {
|
||||
persistActiveDomain("example.tld");
|
||||
NullPointerException thrown =
|
||||
assertThrows(
|
||||
NullPointerException.class,
|
||||
() ->
|
||||
runCommand(
|
||||
"agR0ZXN0chULEgpEb21haW5CYXNlIgUyLVRMRAw",
|
||||
"agR0ZXN0chULEgpEb21haW5CYXNlIgU0LVRMRAw"));
|
||||
assertThat(thrown)
|
||||
.hasMessageThat()
|
||||
.contains("Could not load resource for key: VKey<DomainBase>(sql:4-TLD,ofy:4-TLD)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_deletedDomain() throws Exception {
|
||||
persistDeletedDomain("example.tld", now.minusDays(1));
|
||||
runCommand("agR0ZXN0chULEgpEb21haW5CYXNlIgUyLVRMRAw");
|
||||
assertInStdout("fullyQualifiedDomainName=example.tld");
|
||||
assertInStdout("deletionTime=" + now.minusDays(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_contact() throws Exception {
|
||||
persistActiveContact("sh8013");
|
||||
runCommand("agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjItUk9JRAw");
|
||||
assertInStdout("contactId=sh8013");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_contact_expand() throws Exception {
|
||||
persistActiveContact("sh8013");
|
||||
runCommand("agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjItUk9JRAw", "--expand");
|
||||
assertInStdout("contactId=sh8013");
|
||||
assertNotInStdout("LiveRef");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_contact_multipleArguments() throws Exception {
|
||||
persistActiveContact("sh8013");
|
||||
persistActiveContact("jd1234");
|
||||
runCommand(
|
||||
"agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjItUk9JRAw",
|
||||
"agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjMtUk9JRAw");
|
||||
assertInStdout("contactId=sh8013");
|
||||
assertInStdout("contactId=jd1234");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_contact_oneDoesNotExist() {
|
||||
persistActiveContact("sh8013");
|
||||
NullPointerException thrown =
|
||||
assertThrows(
|
||||
NullPointerException.class,
|
||||
() ->
|
||||
runCommand(
|
||||
"agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjItUk9JRAw",
|
||||
"agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjMtUk9JRAw"));
|
||||
assertThat(thrown)
|
||||
.hasMessageThat()
|
||||
.contains("Could not load resource for key: VKey<ContactResource>(sql:3-ROID,ofy:3-ROID)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_deletedContact() throws Exception {
|
||||
persistDeletedContact("sh8013", now.minusDays(1));
|
||||
runCommand("agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjItUk9JRAw");
|
||||
assertInStdout("contactId=sh8013");
|
||||
assertInStdout("deletionTime=" + now.minusDays(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_host() throws Exception {
|
||||
persistActiveHost("ns1.example.tld");
|
||||
runCommand("agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjItUk9JRAw");
|
||||
assertInStdout("fullyQualifiedHostName=ns1.example.tld");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_host_expand() throws Exception {
|
||||
persistActiveHost("ns1.example.tld");
|
||||
runCommand("agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjItUk9JRAw", "--expand");
|
||||
assertInStdout("fullyQualifiedHostName=ns1.example.tld");
|
||||
assertNotInStdout("LiveRef");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_host_multipleArguments() throws Exception {
|
||||
persistActiveHost("ns1.example.tld");
|
||||
persistActiveHost("ns2.example.tld");
|
||||
runCommand(
|
||||
"agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjItUk9JRAw",
|
||||
"agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjMtUk9JRAw");
|
||||
assertInStdout("fullyQualifiedHostName=ns1.example.tld");
|
||||
assertInStdout("fullyQualifiedHostName=ns2.example.tld");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_host_oneDoesNotExist() {
|
||||
persistActiveHost("ns1.example.tld");
|
||||
NullPointerException thrown =
|
||||
assertThrows(
|
||||
NullPointerException.class,
|
||||
() ->
|
||||
runCommand(
|
||||
"agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjItUk9JRAw",
|
||||
"agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjMtUk9JRAw"));
|
||||
assertThat(thrown)
|
||||
.hasMessageThat()
|
||||
.contains("Could not load resource for key: VKey<HostResource>(sql:3-ROID,ofy:3-ROID)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_deletedHost() throws Exception {
|
||||
persistDeletedHost("ns1.example.tld", now.minusDays(1));
|
||||
runCommand("agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjItUk9JRAw");
|
||||
assertInStdout("fullyQualifiedHostName=ns1.example.tld");
|
||||
assertInStdout("deletionTime=" + now.minusDays(1));
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_mixedTypes() throws Exception {
|
||||
persistActiveDomain("example.tld");
|
||||
persistActiveContact("sh8013");
|
||||
persistActiveHost("ns1.example.tld");
|
||||
runCommand(
|
||||
"agR0ZXN0chULEgpEb21haW5CYXNlIgUyLVRMRAw",
|
||||
"agR0ZXN0chsLEg9Db250YWN0UmVzb3VyY2UiBjQtUk9JRAw",
|
||||
"agR0ZXN0chgLEgxIb3N0UmVzb3VyY2UiBjUtUk9JRAw");
|
||||
assertInStdout("fullyQualifiedDomainName=example.tld");
|
||||
assertInStdout("contactId=sh8013");
|
||||
assertInStdout("fullyQualifiedHostName=ns1.example.tld");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_keyDoesNotExist() {
|
||||
NullPointerException thrown =
|
||||
assertThrows(
|
||||
NullPointerException.class,
|
||||
() -> runCommand("agR0ZXN0chULEgpEb21haW5CYXNlIgUyLVRMRAw"));
|
||||
assertThat(thrown)
|
||||
.hasMessageThat()
|
||||
.contains("Could not load resource for key: VKey<DomainBase>(sql:2-TLD,ofy:2-TLD)");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_nonsenseKey() {
|
||||
IllegalArgumentException thrown =
|
||||
assertThrows(
|
||||
IllegalArgumentException.class, () -> runCommand("agR0ZXN0chULEgpEb21haW5CYXN"));
|
||||
assertThat(thrown).hasMessageThat().contains("Could not parse Reference");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testFailure_noParameters() {
|
||||
assertThrows(ParameterException.class, this::runCommand);
|
||||
}
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static google.registry.tools.LevelDbLogReader.BLOCK_SIZE;
|
||||
import static google.registry.tools.LevelDbLogReader.HEADER_SIZE;
|
||||
|
||||
import com.google.appengine.api.datastore.Entity;
|
||||
import com.google.appengine.api.datastore.EntityTranslator;
|
||||
import com.google.storage.onestore.v3.OnestoreEntity.EntityProto;
|
||||
import google.registry.model.annotations.DeleteAfterMigration;
|
||||
import google.registry.tools.LevelDbLogReader.ChunkType;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
|
||||
/** Utility class for building a leveldb logfile. */
|
||||
@DeleteAfterMigration
|
||||
public final class LevelDbFileBuilder {
|
||||
|
||||
private final FileOutputStream out;
|
||||
private byte[] currentBlock = new byte[BLOCK_SIZE];
|
||||
|
||||
// Write position in the current block.
|
||||
private int currentPos = 0;
|
||||
|
||||
public LevelDbFileBuilder(File file) throws FileNotFoundException {
|
||||
out = new FileOutputStream(file);
|
||||
}
|
||||
|
||||
/** Adds an {@link Entity Datastore Entity object} to the leveldb log file. */
|
||||
public LevelDbFileBuilder addEntity(Entity entity) throws IOException {
|
||||
EntityProto proto = EntityTranslator.convertToPb(entity);
|
||||
byte[] protoBytes = proto.toByteArray();
|
||||
if (protoBytes.length > BLOCK_SIZE - (currentPos + HEADER_SIZE)) {
|
||||
out.write(currentBlock);
|
||||
currentBlock = new byte[BLOCK_SIZE];
|
||||
currentPos = 0;
|
||||
}
|
||||
|
||||
currentPos = LevelDbUtil.addRecord(currentBlock, currentPos, ChunkType.FULL, protoBytes);
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Writes all remaining data and closes the block. */
|
||||
public void build() throws IOException {
|
||||
out.write(currentBlock);
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
@@ -1,123 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.persistence.transaction.TransactionManagerFactory.tm;
|
||||
|
||||
import com.google.appengine.api.datastore.Entity;
|
||||
import com.google.appengine.api.datastore.EntityTranslator;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.storage.onestore.v3.OnestoreEntity.EntityProto;
|
||||
import google.registry.model.contact.ContactResource;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.DatabaseHelper;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import google.registry.tools.EntityWrapper.Property;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
|
||||
/** Unit tests for {@link LevelDbFileBuilder}. */
|
||||
public class LevelDbFileBuilderTest {
|
||||
|
||||
private static final int BASE_ID = 1001;
|
||||
|
||||
@TempDir Path tmpDir;
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
@Test
|
||||
void testSingleRecordWrites() throws IOException {
|
||||
File logFile = tmpDir.resolve("testfile").toFile();
|
||||
LevelDbFileBuilder builder = new LevelDbFileBuilder(logFile);
|
||||
EntityWrapper entity =
|
||||
EntityWrapper.from(
|
||||
BASE_ID, Property.create("first", 100L), Property.create("second", 200L));
|
||||
builder.addEntity(entity.getEntity());
|
||||
builder.build();
|
||||
|
||||
ImmutableList<byte[]> records = ImmutableList.copyOf(LevelDbLogReader.from(logFile.getPath()));
|
||||
assertThat(records).hasSize(1);
|
||||
|
||||
// Reconstitute an entity, make sure that what we've got is the same as what we started with.
|
||||
Entity materializedEntity = rawRecordToEntity(records.get(0));
|
||||
assertThat(new EntityWrapper(materializedEntity)).isEqualTo(entity);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testMultipleRecordWrites() throws IOException {
|
||||
File logFile = tmpDir.resolve("testfile").toFile();
|
||||
LevelDbFileBuilder builder = new LevelDbFileBuilder(logFile);
|
||||
|
||||
// Generate enough records to cross a block boundary. These records end up being around 80
|
||||
// bytes, so 1000 works.
|
||||
ImmutableList.Builder<EntityWrapper> originalEntitiesBuilder = new ImmutableList.Builder<>();
|
||||
for (int i = 0; i < 1000; ++i) {
|
||||
EntityWrapper entity =
|
||||
EntityWrapper.from(
|
||||
BASE_ID + i, Property.create("first", 100L), Property.create("second", 200L));
|
||||
builder.addEntity(entity.getEntity());
|
||||
originalEntitiesBuilder.add(entity);
|
||||
}
|
||||
builder.build();
|
||||
ImmutableList<EntityWrapper> originalEntities = originalEntitiesBuilder.build();
|
||||
|
||||
ImmutableList<byte[]> records = ImmutableList.copyOf(LevelDbLogReader.from(logFile.getPath()));
|
||||
assertThat(records).hasSize(1000);
|
||||
int index = 0;
|
||||
for (byte[] record : records) {
|
||||
Entity materializedEntity = rawRecordToEntity(record);
|
||||
assertThat(new EntityWrapper(materializedEntity)).isEqualTo(originalEntities.get(index));
|
||||
++index;
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
void testOfyEntityWrite() throws Exception {
|
||||
File logFile = tmpDir.resolve("testfile").toFile();
|
||||
LevelDbFileBuilder builder = new LevelDbFileBuilder(logFile);
|
||||
|
||||
ContactResource contact = DatabaseHelper.newContactResource("contact");
|
||||
builder.addEntity(tm().transact(() -> auditedOfy().save().toEntity(contact)));
|
||||
builder.build();
|
||||
|
||||
ImmutableList<byte[]> records = ImmutableList.copyOf(LevelDbLogReader.from(logFile.getPath()));
|
||||
assertThat(records).hasSize(1);
|
||||
ContactResource ofyEntity = rawRecordToOfyEntity(records.get(0), ContactResource.class);
|
||||
assertThat(ofyEntity.getContactId()).isEqualTo(contact.getContactId());
|
||||
}
|
||||
|
||||
private static Entity rawRecordToEntity(byte[] record) {
|
||||
EntityProto proto = new EntityProto();
|
||||
proto.parseFrom(record);
|
||||
return EntityTranslator.createFromPb(proto);
|
||||
}
|
||||
|
||||
private static <T> T rawRecordToOfyEntity(byte[] record, Class<T> expectedType) {
|
||||
return expectedType.cast(auditedOfy().load().fromEntity(rawRecordToEntity(record)));
|
||||
}
|
||||
}
|
||||
@@ -1,85 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.tools.EntityWrapper.Property;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.junit.jupiter.api.io.TempDir;
|
||||
|
||||
/** Unit tests for {@link RecordAccumulator}. */
|
||||
public class RecordAccumulatorTest {
|
||||
|
||||
private static final int BASE_ID = 1001;
|
||||
|
||||
@TempDir public File tmpDir;
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder().withDatastoreAndCloudSql().build();
|
||||
|
||||
@Test
|
||||
void testReadDirectory() throws IOException {
|
||||
LevelDbFileBuilder builder = new LevelDbFileBuilder(new File(tmpDir, "data1"));
|
||||
|
||||
// Note that we need to specify property values as "Long" for property comparisons to work
|
||||
// correctly because that's how they are deserialized from protos.
|
||||
EntityWrapper e1 =
|
||||
EntityWrapper.from(
|
||||
BASE_ID,
|
||||
Property.create("eeny", 100L),
|
||||
Property.create("meeny", 200L),
|
||||
Property.create("miney", 300L));
|
||||
builder.addEntity(e1.getEntity());
|
||||
EntityWrapper e2 =
|
||||
EntityWrapper.from(
|
||||
BASE_ID + 1,
|
||||
Property.create("eeny", 100L),
|
||||
Property.create("meeny", 200L),
|
||||
Property.create("miney", 300L));
|
||||
builder.addEntity(e2.getEntity());
|
||||
builder.build();
|
||||
|
||||
builder = new LevelDbFileBuilder(new File(tmpDir, "data2"));
|
||||
|
||||
// Duplicate of the record in the other file.
|
||||
builder.addEntity(
|
||||
EntityWrapper.from(
|
||||
BASE_ID,
|
||||
Property.create("eeny", 100L),
|
||||
Property.create("meeny", 200L),
|
||||
Property.create("miney", 300L))
|
||||
.getEntity());
|
||||
|
||||
EntityWrapper e3 =
|
||||
EntityWrapper.from(
|
||||
BASE_ID + 2,
|
||||
Property.create("moxy", 100L),
|
||||
Property.create("fruvis", 200L),
|
||||
Property.create("cortex", 300L));
|
||||
builder.addEntity(e3.getEntity());
|
||||
builder.build();
|
||||
|
||||
ImmutableSet<EntityWrapper> entities =
|
||||
RecordAccumulator.readDirectory(tmpDir, any -> true).getEntityWrapperSet();
|
||||
assertThat(entities).containsExactly(e1, e2, e3);
|
||||
}
|
||||
}
|
||||
@@ -1,130 +0,0 @@
|
||||
// Copyright 2020 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ImmutableObjectSubject.immutableObjectCorrespondence;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.newDomainBase;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.truth.Correspondence;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.billing.BillingEvent;
|
||||
import google.registry.model.common.EntityGroupRoot;
|
||||
import google.registry.model.domain.DomainBase;
|
||||
import google.registry.model.poll.PollMessage;
|
||||
import google.registry.model.reporting.HistoryEntry;
|
||||
import google.registry.persistence.VKey;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit test for {@link RemoveRegistryOneKeyCommand}. */
|
||||
public class RemoveRegistryOneKeyCommandTest extends CommandTestCase<RemoveRegistryOneKeyCommand> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
DomainBase domain;
|
||||
HistoryEntry historyEntry;
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
createTld("foobar");
|
||||
domain =
|
||||
newDomainBase("foo.foobar")
|
||||
.asBuilder()
|
||||
.setDeletionTime(DateTime.parse("2016-01-01T00:00:00Z"))
|
||||
.setAutorenewBillingEvent(createRegistryOneVKey(BillingEvent.Recurring.class, 100L))
|
||||
.setAutorenewPollMessage(createRegistryOneVKey(PollMessage.Autorenew.class, 200L))
|
||||
.setDeletePollMessage(createRegistryOneVKey(PollMessage.OneTime.class, 300L))
|
||||
.build();
|
||||
}
|
||||
|
||||
@Test
|
||||
void removeRegistryOneKeyInDomainBase_succeeds() throws Exception {
|
||||
DomainBase origin = persistResource(domain);
|
||||
|
||||
runCommand(
|
||||
"--force",
|
||||
"--key_paths_file",
|
||||
writeToNamedTmpFile("keypath.txt", getKeyPathLiteral(domain)));
|
||||
|
||||
DomainBase persisted = auditedOfy().load().key(domain.createVKey().getOfyKey()).now();
|
||||
assertThat(ImmutableList.of(persisted))
|
||||
.comparingElementsUsing(getDomainBaseCorrespondence())
|
||||
.containsExactly(origin);
|
||||
assertThat(persisted.getAutorenewBillingEvent()).isNull();
|
||||
assertThat(persisted.getAutorenewPollMessage()).isNull();
|
||||
assertThat(persisted.getDeletePollMessage()).isNull();
|
||||
}
|
||||
|
||||
@Test
|
||||
void removeRegistryOneKeyInDomainBase_notModifyRegistryTwoKey() throws Exception {
|
||||
DomainBase origin =
|
||||
persistResource(
|
||||
domain
|
||||
.asBuilder()
|
||||
.setAutorenewBillingEvent(
|
||||
createRegistryTwoVKey(BillingEvent.Recurring.class, domain, 300L))
|
||||
.build());
|
||||
|
||||
runCommand(
|
||||
"--force",
|
||||
"--key_paths_file",
|
||||
writeToNamedTmpFile("keypath.txt", getKeyPathLiteral(domain)));
|
||||
|
||||
DomainBase persisted = auditedOfy().load().key(domain.createVKey().getOfyKey()).now();
|
||||
assertThat(ImmutableList.of(persisted))
|
||||
.comparingElementsUsing(getDomainBaseCorrespondence())
|
||||
.containsExactly(origin);
|
||||
assertThat(persisted.getAutorenewBillingEvent())
|
||||
.isEqualTo(createRegistryTwoVKey(BillingEvent.Recurring.class, domain, 300L));
|
||||
assertThat(persisted.getAutorenewPollMessage()).isNull();
|
||||
assertThat(persisted.getDeletePollMessage()).isNull();
|
||||
}
|
||||
|
||||
private static String getKeyPathLiteral(Object entity) {
|
||||
Key<?> key = Key.create(entity);
|
||||
return String.format("\"DomainBase\", \"%s\"", key.getName());
|
||||
}
|
||||
|
||||
private static <T> VKey<T> createRegistryOneVKey(Class<T> clazz, long id) {
|
||||
Key<?> parent = Key.create(EntityGroupRoot.class, "per-tld");
|
||||
return VKey.create(clazz, id, Key.create(parent, clazz, id));
|
||||
}
|
||||
|
||||
private static <T> VKey<T> createRegistryTwoVKey(Class<T> clazz, DomainBase domain, long id) {
|
||||
Key<?> parent = Key.create(domain.createVKey().getOfyKey(), HistoryEntry.class, 1000L);
|
||||
return VKey.create(clazz, id, Key.create(parent, clazz, id));
|
||||
}
|
||||
|
||||
private static Correspondence<ImmutableObject, ImmutableObject> getDomainBaseCorrespondence() {
|
||||
return immutableObjectCorrespondence(
|
||||
"revisions",
|
||||
"updateTimestamp",
|
||||
"autorenewBillingEvent",
|
||||
"autorenewPollMessage",
|
||||
"deletePollMessage");
|
||||
}
|
||||
}
|
||||
@@ -1,88 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.collect.Iterables.transform;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveContact;
|
||||
|
||||
import com.google.appengine.api.datastore.KeyFactory;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.contact.ContactResource;
|
||||
import google.registry.model.ofy.CommitLogManifest;
|
||||
import google.registry.model.ofy.CommitLogMutation;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link ResaveEntitiesCommand}. */
|
||||
class ResaveEntitiesCommandTest extends CommandTestCase<ResaveEntitiesCommand> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@Test
|
||||
void testSuccess_createsCommitLogs() throws Exception {
|
||||
ContactResource contact1 = persistActiveContact("contact1");
|
||||
ContactResource contact2 = persistActiveContact("contact2");
|
||||
deleteEntitiesOfTypes(CommitLogManifest.class, CommitLogMutation.class);
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class).keys()).isEmpty();
|
||||
runCommandForced(
|
||||
KeyFactory.keyToString(Key.create(contact1).getRaw()),
|
||||
KeyFactory.keyToString(Key.create(contact2).getRaw()));
|
||||
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).hasSize(1);
|
||||
Iterable<ImmutableObject> savedEntities =
|
||||
transform(
|
||||
auditedOfy().load().type(CommitLogMutation.class).list(),
|
||||
mutation -> auditedOfy().load().fromEntity(mutation.getEntity()));
|
||||
// Reload the contacts before asserting, since their update times will have changed.
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(savedEntities)
|
||||
.containsExactlyElementsIn(auditedOfy().load().entities(contact1, contact2).values());
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_createsCommitLogs_withNewWebsafeKey() throws Exception {
|
||||
ContactResource contact1 = persistActiveContact("contact1");
|
||||
ContactResource contact2 = persistActiveContact("contact2");
|
||||
deleteEntitiesOfTypes(CommitLogManifest.class, CommitLogMutation.class);
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class).keys()).isEmpty();
|
||||
runCommandForced(contact1.createVKey().stringify(), contact2.createVKey().stringify());
|
||||
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).hasSize(1);
|
||||
Iterable<ImmutableObject> savedEntities =
|
||||
transform(
|
||||
auditedOfy().load().type(CommitLogMutation.class).list(),
|
||||
mutation -> auditedOfy().load().fromEntity(mutation.getEntity()));
|
||||
// Reload the contacts before asserting, since their update times will have changed.
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(savedEntities)
|
||||
.containsExactlyElementsIn(auditedOfy().load().entities(contact1, contact2).values());
|
||||
}
|
||||
|
||||
@SafeVarargs
|
||||
private static void deleteEntitiesOfTypes(Class<? extends ImmutableObject>... types) {
|
||||
for (Class<? extends ImmutableObject> type : types) {
|
||||
auditedOfy().deleteWithoutBackup().keys(auditedOfy().load().type(type).keys()).now();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,92 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.collect.Iterables.getOnlyElement;
|
||||
import static com.google.common.collect.Iterables.transform;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.loadRegistrar;
|
||||
|
||||
import com.google.common.collect.ImmutableSortedSet;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.ofy.CommitLogManifest;
|
||||
import google.registry.model.ofy.CommitLogMutation;
|
||||
import google.registry.model.registrar.Registrar;
|
||||
import google.registry.model.registrar.RegistrarContact;
|
||||
import google.registry.model.tld.Registry;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link ResaveEnvironmentEntitiesCommand}. */
|
||||
class ResaveEnvironmentEntitiesCommandTest
|
||||
extends CommandTestCase<ResaveEnvironmentEntitiesCommand> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@Test
|
||||
void testSuccess_noop() throws Exception {
|
||||
// Get rid of all the entities that this command runs on so that it does nothing.
|
||||
deleteEntitiesOfTypes(
|
||||
Registry.class,
|
||||
Registrar.class,
|
||||
RegistrarContact.class,
|
||||
CommitLogManifest.class,
|
||||
CommitLogMutation.class);
|
||||
runCommand();
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class).keys()).isEmpty();
|
||||
}
|
||||
|
||||
@Test
|
||||
void testSuccess_createsCommitLogs() throws Exception {
|
||||
createTld("tld");
|
||||
deleteEntitiesOfTypes(CommitLogManifest.class, CommitLogMutation.class);
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class).keys()).isEmpty();
|
||||
runCommand();
|
||||
|
||||
// There are 5 entities that have been re-saved at this point (in 3 transactions, one for each
|
||||
// type), so expect 3 manifests and 5 mutations.
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).hasSize(3);
|
||||
Iterable<ImmutableObject> savedEntities =
|
||||
transform(
|
||||
auditedOfy().load().type(CommitLogMutation.class).list(),
|
||||
mutation -> auditedOfy().load().fromEntity(mutation.getEntity()));
|
||||
ImmutableSortedSet<RegistrarContact> theRegistrarContacts =
|
||||
loadRegistrar("TheRegistrar").getContacts();
|
||||
assertThat(savedEntities)
|
||||
.containsExactly(
|
||||
// The Registrars and RegistrarContacts are created by AppEngineExtension.
|
||||
loadRegistrar("TheRegistrar"),
|
||||
loadRegistrar("NewRegistrar"),
|
||||
Registry.get("tld"),
|
||||
theRegistrarContacts.first(),
|
||||
theRegistrarContacts.last(),
|
||||
getOnlyElement(loadRegistrar("NewRegistrar").getContacts()));
|
||||
}
|
||||
|
||||
@SafeVarargs
|
||||
private static void deleteEntitiesOfTypes(Class<? extends ImmutableObject>... types) {
|
||||
for (Class<? extends ImmutableObject> type : types) {
|
||||
auditedOfy().deleteWithoutBackup().keys(auditedOfy().load().type(type).keys()).now();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveContact;
|
||||
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.contact.ContactResource;
|
||||
import google.registry.model.ofy.CommitLogManifest;
|
||||
import google.registry.model.ofy.CommitLogMutation;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Unit tests for {@link ResaveEppResourceCommand}. */
|
||||
class ResaveEppResourcesCommandTest extends CommandTestCase<ResaveEppResourceCommand> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
@Test
|
||||
void testSuccess_createsCommitLogs() throws Exception {
|
||||
ContactResource contact = persistActiveContact("contact");
|
||||
deleteEntitiesOfTypes(CommitLogManifest.class, CommitLogMutation.class);
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).isEmpty();
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class).keys()).isEmpty();
|
||||
runCommandForced("--type=CONTACT", "--id=contact");
|
||||
|
||||
assertThat(auditedOfy().load().type(CommitLogManifest.class).keys()).hasSize(1);
|
||||
assertThat(auditedOfy().load().type(CommitLogMutation.class).keys()).hasSize(1);
|
||||
CommitLogMutation mutation = auditedOfy().load().type(CommitLogMutation.class).first().now();
|
||||
// Reload the contact before asserting, since its update time will have changed.
|
||||
auditedOfy().clearSessionCache();
|
||||
assertThat(auditedOfy().load().<Object>fromEntity(mutation.getEntity()))
|
||||
.isEqualTo(auditedOfy().load().entity(contact).now());
|
||||
}
|
||||
|
||||
@SafeVarargs
|
||||
private static void deleteEntitiesOfTypes(Class<? extends ImmutableObject>... types) {
|
||||
for (Class<? extends ImmutableObject> type : types) {
|
||||
auditedOfy().deleteWithoutBackup().keys(auditedOfy().load().type(type).keys()).now();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -124,7 +124,7 @@ class GenerateZoneFilesActionTest {
|
||||
GenerateZoneFilesAction action = new GenerateZoneFilesAction();
|
||||
action.bucket = "zonefiles-bucket";
|
||||
action.gcsUtils = gcsUtils;
|
||||
action.datastoreRetention = standardDays(29);
|
||||
action.databaseRetention = standardDays(29);
|
||||
action.dnsDefaultATtl = Duration.standardSeconds(11);
|
||||
action.dnsDefaultNsTtl = Duration.standardSeconds(222);
|
||||
action.dnsDefaultDsTtl = Duration.standardSeconds(3333);
|
||||
|
||||
@@ -1,106 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools.server;
|
||||
|
||||
import static com.google.common.base.Predicates.instanceOf;
|
||||
import static com.google.common.base.Predicates.not;
|
||||
import static com.google.common.collect.ImmutableList.toImmutableList;
|
||||
import static com.google.common.collect.Iterables.filter;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static com.google.common.truth.Truth.assertWithMessage;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.newContactResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistResource;
|
||||
import static google.registry.testing.DatabaseHelper.persistResourceWithCommitLog;
|
||||
import static google.registry.util.DateTimeUtils.START_OF_TIME;
|
||||
import static java.util.Arrays.asList;
|
||||
|
||||
import com.google.appengine.api.datastore.Entity;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.Streams;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.ofy.CommitLogBucket;
|
||||
import google.registry.model.ofy.CommitLogCheckpoint;
|
||||
import google.registry.model.ofy.CommitLogCheckpointRoot;
|
||||
import google.registry.model.ofy.CommitLogManifest;
|
||||
import google.registry.model.ofy.CommitLogMutation;
|
||||
import google.registry.testing.FakeResponse;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import google.registry.testing.mapreduce.MapreduceTestCase;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Tests for {@link KillAllCommitLogsAction}. */
|
||||
class KillAllCommitLogsActionTest extends MapreduceTestCase<KillAllCommitLogsAction> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
private static final ImmutableList<Class<? extends ImmutableObject>> AFFECTED_TYPES =
|
||||
ImmutableList.of(
|
||||
CommitLogBucket.class,
|
||||
CommitLogCheckpoint.class,
|
||||
CommitLogCheckpointRoot.class,
|
||||
CommitLogMutation.class,
|
||||
CommitLogManifest.class);
|
||||
|
||||
private void runMapreduce() throws Exception {
|
||||
action = new KillAllCommitLogsAction();
|
||||
action.mrRunner = makeDefaultRunner();
|
||||
action.response = new FakeResponse();
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce");
|
||||
}
|
||||
|
||||
@Test
|
||||
void testKill() throws Exception {
|
||||
int nextContactId = 5432;
|
||||
for (String tld : asList("tld1", "tld2")) {
|
||||
createTld(tld);
|
||||
persistResourceWithCommitLog(
|
||||
newContactResource(String.format("abc%d", nextContactId++)));
|
||||
}
|
||||
persistResource(CommitLogCheckpointRoot.create(START_OF_TIME.plusDays(1)));
|
||||
DateTime bucketTime = START_OF_TIME.plusDays(2);
|
||||
persistResource(
|
||||
CommitLogCheckpoint.create(
|
||||
START_OF_TIME.plusDays(1),
|
||||
ImmutableMap.of(1, bucketTime, 2, bucketTime, 3, bucketTime)));
|
||||
for (Class<?> clazz : AFFECTED_TYPES) {
|
||||
assertWithMessage("entities of type " + clazz)
|
||||
.that(auditedOfy().load().type(clazz))
|
||||
.isNotEmpty();
|
||||
}
|
||||
ImmutableList<?> otherStuff =
|
||||
Streams.stream(auditedOfy().load())
|
||||
.filter(obj -> !AFFECTED_TYPES.contains(obj.getClass()))
|
||||
.collect(toImmutableList());
|
||||
assertThat(otherStuff).isNotEmpty();
|
||||
runMapreduce();
|
||||
for (Class<?> clazz : AFFECTED_TYPES) {
|
||||
assertWithMessage("entities of type " + clazz)
|
||||
.that(auditedOfy().load().type(clazz))
|
||||
.isEmpty();
|
||||
}
|
||||
// Filter out raw Entity objects created by the mapreduce.
|
||||
assertThat(filter(auditedOfy().load(), not(instanceOf(Entity.class))))
|
||||
.containsExactlyElementsIn(otherStuff);
|
||||
}
|
||||
}
|
||||
@@ -1,178 +0,0 @@
|
||||
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package google.registry.tools.server;
|
||||
|
||||
import static com.google.appengine.repackaged.com.google.common.collect.Sets.difference;
|
||||
import static com.google.common.base.Predicates.in;
|
||||
import static com.google.common.base.Predicates.instanceOf;
|
||||
import static com.google.common.base.Predicates.not;
|
||||
import static com.google.common.collect.ImmutableSet.toImmutableSet;
|
||||
import static com.google.common.collect.Multimaps.filterKeys;
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveContact;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveDomain;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveHost;
|
||||
import static google.registry.util.DateTimeUtils.START_OF_TIME;
|
||||
import static java.util.Arrays.asList;
|
||||
|
||||
import com.google.appengine.api.datastore.Entity;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableMultimap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import com.google.common.collect.Iterables;
|
||||
import com.googlecode.objectify.Key;
|
||||
import google.registry.model.EppResource;
|
||||
import google.registry.model.ImmutableObject;
|
||||
import google.registry.model.billing.BillingEvent;
|
||||
import google.registry.model.billing.BillingEvent.Reason;
|
||||
import google.registry.model.contact.ContactResource;
|
||||
import google.registry.model.domain.DomainBase;
|
||||
import google.registry.model.domain.DomainHistory;
|
||||
import google.registry.model.host.HostResource;
|
||||
import google.registry.model.index.EppResourceIndex;
|
||||
import google.registry.model.index.ForeignKeyIndex.ForeignKeyContactIndex;
|
||||
import google.registry.model.index.ForeignKeyIndex.ForeignKeyDomainIndex;
|
||||
import google.registry.model.index.ForeignKeyIndex.ForeignKeyHostIndex;
|
||||
import google.registry.model.poll.PollMessage;
|
||||
import google.registry.model.reporting.HistoryEntry;
|
||||
import google.registry.testing.DatabaseHelper;
|
||||
import google.registry.testing.FakeResponse;
|
||||
import google.registry.testing.TmOverrideExtension;
|
||||
import google.registry.testing.mapreduce.MapreduceTestCase;
|
||||
import java.util.stream.Stream;
|
||||
import org.joda.money.CurrencyUnit;
|
||||
import org.joda.money.Money;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
|
||||
/** Tests for {@link KillAllEppResourcesAction}. */
|
||||
class KillAllEppResourcesActionTest extends MapreduceTestCase<KillAllEppResourcesAction> {
|
||||
|
||||
@RegisterExtension
|
||||
@Order(Order.DEFAULT - 1)
|
||||
TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();
|
||||
|
||||
private static final ImmutableSet<String> AFFECTED_KINDS =
|
||||
Stream.of(
|
||||
EppResourceIndex.class,
|
||||
ForeignKeyContactIndex.class,
|
||||
ForeignKeyDomainIndex.class,
|
||||
ForeignKeyHostIndex.class,
|
||||
DomainBase.class,
|
||||
ContactResource.class,
|
||||
HostResource.class,
|
||||
HistoryEntry.class,
|
||||
PollMessage.class,
|
||||
BillingEvent.OneTime.class,
|
||||
BillingEvent.Recurring.class)
|
||||
.map(Key::getKind)
|
||||
.collect(toImmutableSet());
|
||||
|
||||
private void runMapreduce() throws Exception {
|
||||
action = new KillAllEppResourcesAction();
|
||||
action.mrRunner = makeDefaultRunner();
|
||||
action.response = new FakeResponse();
|
||||
action.run();
|
||||
executeTasksUntilEmpty("mapreduce");
|
||||
}
|
||||
|
||||
private static final ImmutableMap<Class<? extends EppResource>, HistoryEntry.Type>
|
||||
HISTORY_ENTRY_CREATE_TYPES =
|
||||
ImmutableMap.of(
|
||||
DomainBase.class,
|
||||
HistoryEntry.Type.DOMAIN_CREATE,
|
||||
ContactResource.class,
|
||||
HistoryEntry.Type.CONTACT_CREATE,
|
||||
HostResource.class,
|
||||
HistoryEntry.Type.HOST_CREATE);
|
||||
|
||||
@Test
|
||||
void testKill() throws Exception {
|
||||
createTld("tld1");
|
||||
createTld("tld2");
|
||||
for (EppResource resource :
|
||||
asList(
|
||||
persistActiveDomain("foo.tld1"),
|
||||
persistActiveDomain("foo.tld2"),
|
||||
persistActiveContact("foo"),
|
||||
persistActiveContact("foo"),
|
||||
persistActiveHost("ns.foo.tld1"),
|
||||
persistActiveHost("ns.foo.tld2"))) {
|
||||
HistoryEntry history =
|
||||
HistoryEntry.createBuilderForResource(resource)
|
||||
.setRegistrarId(resource.getCreationRegistrarId())
|
||||
.setModificationTime(resource.getCreationTime())
|
||||
.setType(HISTORY_ENTRY_CREATE_TYPES.get(resource.getClass()))
|
||||
.build();
|
||||
ImmutableList.Builder<ImmutableObject> descendantBuilder =
|
||||
new ImmutableList.Builder<ImmutableObject>()
|
||||
.add(
|
||||
history,
|
||||
new PollMessage.OneTime.Builder()
|
||||
.setParent(history)
|
||||
.setRegistrarId("")
|
||||
.setEventTime(START_OF_TIME)
|
||||
.build(),
|
||||
new PollMessage.Autorenew.Builder()
|
||||
.setParent(history)
|
||||
.setRegistrarId("")
|
||||
.setEventTime(START_OF_TIME)
|
||||
.build());
|
||||
if (history instanceof DomainHistory) {
|
||||
descendantBuilder.add(
|
||||
new BillingEvent.OneTime.Builder()
|
||||
.setParent((DomainHistory) history)
|
||||
.setBillingTime(START_OF_TIME)
|
||||
.setEventTime(START_OF_TIME)
|
||||
.setRegistrarId("")
|
||||
.setTargetId("")
|
||||
.setReason(Reason.CREATE)
|
||||
.setPeriodYears(1)
|
||||
.setCost(Money.of(CurrencyUnit.USD, 1))
|
||||
.build(),
|
||||
new BillingEvent.Recurring.Builder()
|
||||
.setParent((DomainHistory) history)
|
||||
.setEventTime(START_OF_TIME)
|
||||
.setRegistrarId("")
|
||||
.setTargetId("")
|
||||
.setReason(Reason.RENEW)
|
||||
.build());
|
||||
}
|
||||
descendantBuilder.build().forEach(DatabaseHelper::persistResource);
|
||||
}
|
||||
ImmutableMultimap<String, Object> beforeContents = getDatastoreContents();
|
||||
assertThat(beforeContents.keySet()).containsAtLeastElementsIn(AFFECTED_KINDS);
|
||||
assertThat(difference(beforeContents.keySet(), AFFECTED_KINDS)).isNotEmpty();
|
||||
runMapreduce();
|
||||
auditedOfy().clearSessionCache();
|
||||
ImmutableMultimap<String, Object> afterContents = getDatastoreContents();
|
||||
assertThat(afterContents.keySet()).containsNoneIn(AFFECTED_KINDS);
|
||||
assertThat(afterContents)
|
||||
.containsExactlyEntriesIn(filterKeys(beforeContents, not(in(AFFECTED_KINDS))));
|
||||
}
|
||||
|
||||
private ImmutableMultimap<String, Object> getDatastoreContents() {
|
||||
ImmutableMultimap.Builder<String, Object> contentsBuilder = new ImmutableMultimap.Builder<>();
|
||||
// Filter out raw Entity objects created by the mapreduce.
|
||||
for (Object obj : Iterables.filter(auditedOfy().load(), not(instanceOf(Entity.class)))) {
|
||||
contentsBuilder.put(Key.getKind(obj.getClass()), obj);
|
||||
}
|
||||
return contentsBuilder.build();
|
||||
}
|
||||
}
|
||||
@@ -18,48 +18,54 @@ import static com.google.common.truth.Truth.assertThat;
|
||||
import static google.registry.testing.DatabaseHelper.createTld;
|
||||
import static google.registry.testing.DatabaseHelper.persistActiveDomain;
|
||||
import static google.registry.testing.DatabaseHelper.persistDeletedDomain;
|
||||
import static google.registry.testing.TaskQueueHelper.assertDnsTasksEnqueued;
|
||||
import static org.joda.time.Duration.standardMinutes;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.eq;
|
||||
import static org.mockito.Mockito.doThrow;
|
||||
import static org.mockito.Mockito.inOrder;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.never;
|
||||
import static org.mockito.Mockito.spy;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.verifyNoMoreInteractions;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import google.registry.dns.DnsQueue;
|
||||
import google.registry.model.ofy.Ofy;
|
||||
import google.registry.testing.AppEngineExtension;
|
||||
import google.registry.testing.DualDatabaseTest;
|
||||
import google.registry.testing.FakeClock;
|
||||
import google.registry.testing.FakeResponse;
|
||||
import google.registry.testing.InjectExtension;
|
||||
import google.registry.testing.TestOfyAndSql;
|
||||
import google.registry.testing.TestSqlOnly;
|
||||
import google.registry.testing.mapreduce.MapreduceTestCase;
|
||||
import google.registry.tools.server.RefreshDnsForAllDomainsAction.RefreshDnsForAllDomainsActionMapper;
|
||||
import java.util.Random;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.Duration;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Order;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.InOrder;
|
||||
|
||||
/** Unit tests for {@link RefreshDnsForAllDomainsAction}. */
|
||||
@DualDatabaseTest
|
||||
public class RefreshDnsForAllDomainsActionTest
|
||||
extends MapreduceTestCase<RefreshDnsForAllDomainsAction> {
|
||||
public class RefreshDnsForAllDomainsActionTest {
|
||||
|
||||
private final FakeClock clock = new FakeClock(DateTime.parse("2020-02-02T02:02:02Z"));
|
||||
private final DnsQueue dnsQueue = mock(DnsQueue.class);
|
||||
private DnsQueue origDnsQueue;
|
||||
private RefreshDnsForAllDomainsAction action;
|
||||
private FakeResponse response = new FakeResponse();
|
||||
|
||||
@RegisterExtension
|
||||
public final AppEngineExtension appEngine =
|
||||
AppEngineExtension.builder()
|
||||
.withDatastoreAndCloudSql()
|
||||
.withLocalModules()
|
||||
.withTaskQueue()
|
||||
.build();
|
||||
|
||||
@Order(Order.DEFAULT - 1)
|
||||
@RegisterExtension
|
||||
public final InjectExtension inject =
|
||||
@@ -67,13 +73,10 @@ public class RefreshDnsForAllDomainsActionTest
|
||||
|
||||
@BeforeEach
|
||||
void beforeEach() {
|
||||
origDnsQueue = RefreshDnsForAllDomainsActionMapper.setDnsQueueForTest(dnsQueue);
|
||||
|
||||
action = new RefreshDnsForAllDomainsAction();
|
||||
action.smearMinutes = 1;
|
||||
action.random = new Random();
|
||||
action.random.setSeed(123L);
|
||||
action.mrRunner = makeDefaultRunner();
|
||||
action.response = new FakeResponse();
|
||||
action.clock = clock;
|
||||
action.dnsQueue = dnsQueue;
|
||||
@@ -82,30 +85,21 @@ public class RefreshDnsForAllDomainsActionTest
    createTld("bar");
  }

  @AfterEach
  void afterEach() {
    assertThat(RefreshDnsForAllDomainsActionMapper.setDnsQueueForTest(origDnsQueue))
        .isEqualTo(dnsQueue);
  }

  private void runAction() throws Exception {
    action.run();
    executeTasksUntilEmpty("mapreduce");
  }

  @TestSqlOnly
  void test_runAction_errorEnqueuingToDnsQueue() throws Exception {
    persistActiveDomain("foo.bar");
    persistActiveDomain("baz.bar");
    persistActiveDomain("low.bar");
    action.tlds = ImmutableSet.of("bar");
    DnsQueue faultyQueue = spy(origDnsQueue);
    doThrow(new RuntimeException("Error enqueuing task."))
        .when(faultyQueue)
        .when(dnsQueue)
        .addDomainRefreshTask(eq("baz.bar"), any(Duration.class));
    action.dnsQueue = faultyQueue;
    runAction();
    assertDnsTasksEnqueued("foo.bar", "low.bar");
    action.run();
    InOrder inOrder = inOrder(dnsQueue);
    inOrder.verify(dnsQueue).addDomainRefreshTask("low.bar", Duration.ZERO);
    inOrder.verify(dnsQueue).addDomainRefreshTask("baz.bar", Duration.ZERO);
    inOrder.verify(dnsQueue).addDomainRefreshTask("foo.bar", Duration.ZERO);
    verifyNoMoreInteractions(dnsQueue);
    assertThat(response.getStatus()).isEqualTo(HttpStatus.SC_INTERNAL_SERVER_ERROR);
  }

@@ -114,7 +108,7 @@ public class RefreshDnsForAllDomainsActionTest
    persistActiveDomain("foo.bar");
    persistActiveDomain("low.bar");
    action.tlds = ImmutableSet.of("bar");
    runAction();
    action.run();
    verify(dnsQueue).addDomainRefreshTask("foo.bar", Duration.ZERO);
    verify(dnsQueue).addDomainRefreshTask("low.bar", Duration.ZERO);
  }
@@ -125,7 +119,7 @@ public class RefreshDnsForAllDomainsActionTest
    persistActiveDomain("low.bar");
    action.tlds = ImmutableSet.of("bar");
    action.smearMinutes = 1000;
    runAction();
    action.run();
    ArgumentCaptor<Duration> captor = ArgumentCaptor.forClass(Duration.class);
    verify(dnsQueue).addDomainRefreshTask(eq("foo.bar"), captor.capture());
    verify(dnsQueue).addDomainRefreshTask(eq("low.bar"), captor.capture());
@@ -137,7 +131,7 @@ public class RefreshDnsForAllDomainsActionTest
    persistActiveDomain("foo.bar");
    persistDeletedDomain("deleted.bar", clock.nowUtc().minusYears(1));
    action.tlds = ImmutableSet.of("bar");
    runAction();
    action.run();
    verify(dnsQueue).addDomainRefreshTask("foo.bar", Duration.ZERO);
    verify(dnsQueue, never()).addDomainRefreshTask("deleted.bar", Duration.ZERO);
  }
@@ -149,7 +143,7 @@ public class RefreshDnsForAllDomainsActionTest
    persistActiveDomain("low.bar");
    persistActiveDomain("ignore.baz");
    action.tlds = ImmutableSet.of("bar");
    runAction();
    action.run();
    verify(dnsQueue).addDomainRefreshTask("foo.bar", Duration.ZERO);
    verify(dnsQueue).addDomainRefreshTask("low.bar", Duration.ZERO);
    verify(dnsQueue, never()).addDomainRefreshTask("ignore.baz", Duration.ZERO);

@@ -1,106 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package google.registry.tools.server;

import static com.google.common.truth.Truth.assertThat;
import static google.registry.model.ofy.ObjectifyService.auditedOfy;
import static google.registry.testing.DatabaseHelper.createTld;
import static google.registry.testing.DatabaseHelper.persistActiveContact;
import static google.registry.testing.DatabaseHelper.persistActiveDomain;

import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import google.registry.model.contact.ContactHistory;
import google.registry.model.contact.ContactResource;
import google.registry.model.domain.DomainBase;
import google.registry.model.domain.DomainHistory;
import google.registry.model.reporting.HistoryEntry;
import google.registry.testing.FakeResponse;
import google.registry.testing.TmOverrideExtension;
import google.registry.testing.mapreduce.MapreduceTestCase;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;

/** Unit tests for {@link ResaveAllHistoryEntriesAction}. */
class ResaveAllHistoryEntriesActionTest extends MapreduceTestCase<ResaveAllHistoryEntriesAction> {

  @RegisterExtension
  @Order(Order.DEFAULT - 1)
  TmOverrideExtension tmOverrideExtension = TmOverrideExtension.withOfy();

  private static final DatastoreService datastoreService =
      DatastoreServiceFactory.getDatastoreService();

  @BeforeEach
  void beforeEach() {
    action = new ResaveAllHistoryEntriesAction();
    action.mrRunner = makeDefaultRunner();
    action.response = new FakeResponse();
  }

  private void runMapreduce() throws Exception {
    action.run();
    executeTasksUntilEmpty("mapreduce");
  }

  @Test
  void test_mapreduceSuccessfullyResavesEntity() throws Exception {
    createTld("tld");
    DomainBase domain = persistActiveDomain("test.tld");
    ContactResource contact = persistActiveContact("humanBeing");
    Entity domainEntry =
        auditedOfy()
            .save()
            .toEntity(
                new DomainHistory.Builder()
                    .setDomain(domain)
                    .setType(HistoryEntry.Type.DOMAIN_CREATE)
                    .setModificationTime(domain.getCreationTime())
                    .setRegistrarId(domain.getCreationRegistrarId())
                    .build()
                    .asHistoryEntry());
    Entity contactEntry =
        auditedOfy()
            .save()
            .toEntity(
                new ContactHistory.Builder()
                    .setContact(contact)
                    .setType(HistoryEntry.Type.CONTACT_CREATE)
                    .setRegistrarId(contact.getCreationRegistrarId())
                    .setModificationTime(contact.getCreationTime())
                    .build()
                    .asHistoryEntry());

    // Set raw properties outside the Objectify schema, which will be deleted upon re-save.
    domainEntry.setProperty("clientId", "validId");
    contactEntry.setProperty("otherClientId", "anotherId");
    domainEntry.setProperty("propertyToBeDeleted", "123blah");
    contactEntry.setProperty("alsoShouldBeDeleted", "456nah");
    datastoreService.put(domainEntry);
    datastoreService.put(contactEntry);
    auditedOfy().clearSessionCache();
    runMapreduce();

    Entity updatedDomainEntry = datastoreService.get(domainEntry.getKey());
    Entity updatedContactEntry = datastoreService.get(contactEntry.getKey());
    assertThat(updatedDomainEntry.getProperty("clientId")).isEqualTo("validId");
    assertThat(updatedDomainEntry.getProperty("propertyToBeDeleted")).isNull();
    assertThat(updatedContactEntry.getProperty("otherClientId")).isEqualTo("anotherId");
    assertThat(updatedContactEntry.getProperty("alsoShouldBeDeleted")).isNull();
  }
}

@@ -108,7 +108,6 @@ class google.registry.model.contact.ContactAuthInfo {
}
class google.registry.model.contact.ContactBase {
  @Id java.lang.String repoId;
  com.google.common.collect.ImmutableSortedMap<org.joda.time.DateTime, com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogManifest>> revisions;
  google.registry.model.CreateAutoTimestamp creationTime;
  google.registry.model.UpdateAutoTimestamp updateTimestamp;
  google.registry.model.contact.ContactAuthInfo authInfo;
@@ -151,7 +150,6 @@ class google.registry.model.contact.ContactPhoneNumber {
}
class google.registry.model.contact.ContactResource {
  @Id java.lang.String repoId;
  com.google.common.collect.ImmutableSortedMap<org.joda.time.DateTime, com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogManifest>> revisions;
  google.registry.model.CreateAutoTimestamp creationTime;
  google.registry.model.UpdateAutoTimestamp updateTimestamp;
  google.registry.model.contact.ContactAuthInfo authInfo;
@@ -209,7 +207,6 @@ class google.registry.model.domain.DomainAuthInfo {
}
class google.registry.model.domain.DomainBase {
  @Id java.lang.String repoId;
  com.google.common.collect.ImmutableSortedMap<org.joda.time.DateTime, com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogManifest>> revisions;
  google.registry.model.CreateAutoTimestamp creationTime;
  google.registry.model.UpdateAutoTimestamp updateTimestamp;
  google.registry.model.domain.DomainAuthInfo authInfo;
@@ -239,7 +236,6 @@ class google.registry.model.domain.DomainBase {
}
class google.registry.model.domain.DomainContent {
  @Id java.lang.String repoId;
  com.google.common.collect.ImmutableSortedMap<org.joda.time.DateTime, com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogManifest>> revisions;
  google.registry.model.CreateAutoTimestamp creationTime;
  google.registry.model.UpdateAutoTimestamp updateTimestamp;
  google.registry.model.domain.DomainAuthInfo authInfo;
@@ -405,7 +401,6 @@ class google.registry.model.eppcommon.Trid {
}
class google.registry.model.host.HostBase {
  @Id java.lang.String repoId;
  com.google.common.collect.ImmutableSortedMap<org.joda.time.DateTime, com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogManifest>> revisions;
  google.registry.model.CreateAutoTimestamp creationTime;
  google.registry.model.UpdateAutoTimestamp updateTimestamp;
  google.registry.persistence.VKey<google.registry.model.domain.DomainBase> superordinateDomain;
@@ -438,7 +433,6 @@ class google.registry.model.host.HostHistory {
}
class google.registry.model.host.HostResource {
  @Id java.lang.String repoId;
  com.google.common.collect.ImmutableSortedMap<org.joda.time.DateTime, com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogManifest>> revisions;
  google.registry.model.CreateAutoTimestamp creationTime;
  google.registry.model.UpdateAutoTimestamp updateTimestamp;
  google.registry.persistence.VKey<google.registry.model.domain.DomainBase> superordinateDomain;
@@ -481,29 +475,6 @@ class google.registry.model.index.ForeignKeyIndex$ForeignKeyHostIndex {
  google.registry.persistence.VKey<E> topReference;
  org.joda.time.DateTime deletionTime;
}
class google.registry.model.ofy.CommitLogBucket {
  @Id long bucketNum;
  org.joda.time.DateTime lastWrittenTime;
}
class google.registry.model.ofy.CommitLogCheckpoint {
  @Id long checkpointTime;
  @Parent com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogCheckpointRoot> parent;
  java.util.List<org.joda.time.DateTime> bucketTimestamps;
}
class google.registry.model.ofy.CommitLogCheckpointRoot {
  @Id long id;
  org.joda.time.DateTime lastWrittenTime;
}
class google.registry.model.ofy.CommitLogManifest {
  @Id long commitTime;
  @Parent com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogBucket> parent;
  java.util.Set<com.googlecode.objectify.Key<?>> deletions;
}
class google.registry.model.ofy.CommitLogMutation {
  @Id java.lang.String entityKey;
  @Parent com.googlecode.objectify.Key<google.registry.model.ofy.CommitLogManifest> parent;
  byte[] entityProtoBytes;
}
class google.registry.model.poll.PendingActionNotificationResponse$ContactPendingActionNotificationResponse {
  google.registry.model.eppcommon.Trid trid;
  google.registry.model.poll.PendingActionNotificationResponse$NameOrId nameOrId;

@@ -1,6 +1,4 @@
PATH CLASS METHODS OK AUTH_METHODS MIN USER_POLICY
/_dr/cron/commitLogCheckpoint CommitLogCheckpointAction GET y INTERNAL,API APP ADMIN
/_dr/cron/commitLogFanout CommitLogFanoutAction GET y INTERNAL,API APP ADMIN
/_dr/cron/fanout TldFanoutAction GET y INTERNAL,API APP ADMIN
/_dr/cron/readDnsQueue ReadDnsQueueAction GET y INTERNAL,API APP ADMIN
/_dr/dnsRefresh RefreshDnsAction GET y INTERNAL,API APP ADMIN
@@ -10,10 +8,8 @@ PATH CLASS
/_dr/task/copyDetailReports CopyDetailReportsAction POST n INTERNAL,API APP ADMIN
/_dr/task/deleteExpiredDomains DeleteExpiredDomainsAction GET n INTERNAL,API APP ADMIN
/_dr/task/deleteLoadTestData DeleteLoadTestDataAction POST n INTERNAL,API APP ADMIN
/_dr/task/deleteOldCommitLogs DeleteOldCommitLogsAction GET n INTERNAL,API APP ADMIN
/_dr/task/deleteProberData DeleteProberDataAction POST n INTERNAL,API APP ADMIN
/_dr/task/expandRecurringBillingEvents ExpandRecurringBillingEventsAction GET n INTERNAL,API APP ADMIN
/_dr/task/exportCommitLogDiff ExportCommitLogDiffAction POST y INTERNAL,API APP ADMIN
/_dr/task/exportDomainLists ExportDomainListsAction POST n INTERNAL,API APP ADMIN
/_dr/task/exportPremiumTerms ExportPremiumTermsAction POST n INTERNAL,API APP ADMIN
/_dr/task/exportReservedTerms ExportReservedTermsAction POST n INTERNAL,API APP ADMIN
@@ -31,7 +27,6 @@ PATH CLASS
/_dr/task/rdeStaging RdeStagingAction GET,POST n INTERNAL,API APP ADMIN
/_dr/task/rdeUpload RdeUploadAction POST n INTERNAL,API APP ADMIN
/_dr/task/relockDomain RelockDomainAction POST y INTERNAL,API APP ADMIN
/_dr/task/resaveAllEppResources ResaveAllEppResourcesAction GET n INTERNAL,API APP ADMIN
/_dr/task/resaveAllEppResourcesPipeline ResaveAllEppResourcesPipelineAction GET n INTERNAL,API APP ADMIN
/_dr/task/resaveEntity ResaveEntityAction POST n INTERNAL,API APP ADMIN
/_dr/task/sendExpiringCertificateNotificationEmail SendExpiringCertificateNotificationEmailAction GET n INTERNAL,API APP ADMIN

@@ -10,8 +10,4 @@ PATH CLASS METHODS OK AUTH
/_dr/epptool EppToolAction POST n INTERNAL,API APP ADMIN
/_dr/loadtest LoadTestAction POST y INTERNAL,API APP ADMIN
/_dr/task/generateZoneFiles GenerateZoneFilesAction POST n INTERNAL,API APP ADMIN
/_dr/task/killAllCommitLogs KillAllCommitLogsAction POST n INTERNAL,API APP ADMIN
/_dr/task/killAllEppResources KillAllEppResourcesAction POST n INTERNAL,API APP ADMIN
/_dr/task/refreshDnsForAllDomains RefreshDnsForAllDomainsAction GET n INTERNAL,API APP ADMIN
/_dr/task/resaveAllHistoryEntries ResaveAllHistoryEntriesAction GET n INTERNAL,API APP ADMIN
/_dr/task/restoreCommitLogs RestoreCommitLogsAction POST y INTERNAL,API APP ADMIN