Skip to content

Commit

Permalink
Removed job MigrateRowsFromFile which was earlier used to load PK records from a file, search for them in origin & migrate those to Target.
Browse files Browse the repository at this point in the history

The file with PK records was generated from the PK validation errors that were found.
However, this feature is no longer needed given more advanced features such as validation auto-correct, rerunning migration/validation only on token-ranges with diffs, etc.
  • Loading branch information
pravinbhat committed Jul 18, 2024
1 parent 421e7db commit ceaa216
Show file tree
Hide file tree
Showing 29 changed files with 0 additions and 445 deletions.
2 changes: 0 additions & 2 deletions SIT/features/07_migrate_rows/cdm.txt

This file was deleted.

19 changes: 0 additions & 19 deletions SIT/features/07_migrate_rows/execute.sh

This file was deleted.

13 changes: 0 additions & 13 deletions SIT/features/07_migrate_rows/expected.cql

This file was deleted.

7 changes: 0 additions & 7 deletions SIT/features/07_migrate_rows/expected.out

This file was deleted.

16 changes: 0 additions & 16 deletions SIT/features/07_migrate_rows/migrate.properties

This file was deleted.

18 changes: 0 additions & 18 deletions SIT/features/07_migrate_rows/migrate_with_pkrowsfile.properties

This file was deleted.

This file was deleted.

2 changes: 0 additions & 2 deletions SIT/features/07_migrate_rows/primary_key_rows.csv

This file was deleted.

20 changes: 0 additions & 20 deletions SIT/features/07_migrate_rows/setup.cql

This file was deleted.

114 changes: 0 additions & 114 deletions src/main/java/com/datastax/cdm/job/CopyPKJobSession.java

This file was deleted.

34 changes: 0 additions & 34 deletions src/main/java/com/datastax/cdm/job/CopyPKJobSessionFactory.java

This file was deleted.

33 changes: 0 additions & 33 deletions src/main/java/com/datastax/cdm/job/SplitPartitions.java
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

Expand Down Expand Up @@ -94,25 +93,6 @@ public PartitionMinMax(String line) {
}
}

public static List<PKRows> getRowPartsFromFile(int numSplits, String inputFilename) throws IOException {
logger.info("ThreadID: {} Splitting rows in file: {} using a split-size of {}"
, Thread.currentThread().getId(), inputFilename, numSplits);
List<String> pkRows = new ArrayList<String>();
BufferedReader reader = getfileReader(inputFilename);
String pkRow = null;
while ((pkRow = reader.readLine()) != null) {
if (pkRow.startsWith("#")) {
continue;
}
pkRows.add(pkRow);
}
int partSize = pkRows.size() / numSplits;
if (partSize == 0) {
partSize = pkRows.size();
}
return batches(pkRows, partSize).map(l -> (new PKRows(l))).collect(Collectors.toList());
}

public static <T> Stream<List<T>> batches(List<T> source, int length) {
if (length <= 0)
throw new IllegalArgumentException("length = " + length);
Expand Down Expand Up @@ -188,19 +168,6 @@ public static String getPartitionFileOutput(PropertyHelper propertyHelper) {
return "./" + propertyHelper.getString(KnownProperties.ORIGIN_KEYSPACE_TABLE) + "_partitions.csv";
}

public static class PKRows implements Serializable {
private static final long serialVersionUID = 1L;
private List<String> pkRows;

public List<String> getPkRows() {
return pkRows;
}

public PKRows(List<String> rows) {
pkRows = new ArrayList<>(rows);
}
}

public static class Partition implements Serializable {
private static final long serialVersionUID = 1L;

Expand Down
27 changes: 0 additions & 27 deletions src/main/scala/com/datastax/cdm/job/BasePKJob.scala

This file was deleted.

33 changes: 0 additions & 33 deletions src/main/scala/com/datastax/cdm/job/MigrateRowsFromFile.scala

This file was deleted.

Loading

0 comments on commit ceaa216

Please sign in to comment.