Skip to content

Commit

Permalink
Nightly Slack Notifications (#174)
Browse files Browse the repository at this point in the history
  • Loading branch information
stanbrub authored Oct 17, 2023
1 parent 20a79ec commit 1d2c92b
Show file tree
Hide file tree
Showing 26 changed files with 842 additions and 36 deletions.
6 changes: 6 additions & 0 deletions .github/resources/nightly-scale-benchmark.properties
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,9 @@ timestamp.test.results=
# Experimental: Docker compose file (e.g. /mypath/docker-compose.yml)
# Empty means no docker restart attempt will be made
docker.compose.file=/root/deephaven/docker-compose.yml

# The token used for authenticating posts to the Slack API
slack.token=${slackToken}

# The channel to post notifications to
slack.channel=${slackChannel}
40 changes: 40 additions & 0 deletions .github/scripts/run-publish-local.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
#!/usr/bin/env bash

set -o errexit
set -o pipefail
set -o nounset

# Publish benchmark result tables to a secret Slack channel. The publish
# queries operate exclusively on the deephaven-benchmark GCloud bucket.
#
# Usage: run-publish-local.sh <run-type> <slack-channel> <slack-token>
if [[ $# != 3 ]]; then
  echo "$0: Missing run-type, slack-channel or slack-token argument"
  exit 1
fi

CWD=$(pwd)
RUN_DIR=${CWD}/publish
GIT_DIR=${CWD}
DEEPHAVEN_DIR=${CWD}
RUN_TYPE=$1
SLACK_CHANNEL=$2
SLACK_TOKEN=$3
BENCH_PROPS_NAME=${RUN_TYPE}-scale-benchmark.properties
BENCH_PROPS_PATH=${GIT_DIR}/.github/resources/${BENCH_PROPS_NAME}

# Stage the benchmark jar (without the tests jar) in a clean run directory
mkdir -p "${RUN_DIR}"
cp "${GIT_DIR}"/target/deephaven-benchmark-*.jar "${RUN_DIR}/"
rm -f "${RUN_DIR}"/deephaven-benchmark*-tests.jar

# Inject the Slack secrets into the properties template. The placeholders
# (${slackToken}, ${slackChannel}) are single-quoted so the shell does not
# expand them; the replacement values are double-quoted so it does.
sed -e 's|${slackToken}|'"${SLACK_TOKEN}"'|g' \
    -e 's|${slackChannel}|'"${SLACK_CHANNEL}"'|g' \
    "${BENCH_PROPS_PATH}" > "${RUN_DIR}/${BENCH_PROPS_NAME}"

# Restart Deephaven so the publish queries run against a fresh engine
cd "${DEEPHAVEN_DIR}"
sudo docker compose down
sudo docker compose up -d
sleep 10

cd "${RUN_DIR}"
java -Dbenchmark.profile=${BENCH_PROPS_NAME} -jar deephaven-benchmark-*.jar publish

cd "${DEEPHAVEN_DIR}"
sudo docker compose down
sleep 10

8 changes: 4 additions & 4 deletions .github/workflows/mvn-integration-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,20 +25,20 @@ jobs:
- name: Docker Pull Deephaven and Redpanda
run: |
cp .github/resources/integration-docker-compose.yml docker-compose.yml
docker-compose pull
docker compose pull
- name: Docker Up Deephaven and Redpanda
run: docker-compose up -d
run: docker compose up -d

- name: Build with Maven
run: mvn -B verify --file pom.xml

- name: Save Docker Logs
run: |
mkdir logs
docker-compose logs --no-color > logs/docker.log &
docker compose logs --no-color > logs/docker.log &
sleep 10
docker-compose down
docker compose down
- name: Archive Results
uses: actions/upload-artifact@v3
Expand Down
29 changes: 29 additions & 0 deletions .github/workflows/remote-benchmarks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,26 @@ jobs:

steps:
- uses: actions/checkout@v3
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'
cache: maven

- name: Docker Pull Deephaven and Redpanda
run: |
cp .github/resources/${RUN_TYPE}-benchmark-docker-compose.yml docker-compose.yml
docker compose pull
- name: Docker Up Deephaven and Redpanda
run: docker compose up -d

- name: Build with Maven
run: |
mvn -B verify --file pom.xml
rm -rf results
- name: Setup Local and Remote Scripts
run: |
sudo chmod +x ${SD}/*
Expand Down Expand Up @@ -83,6 +103,14 @@ jobs:
run: |
gcloud compute ssh --zone "us-central1-a" --project "deephaven-oss" dhc-demo-nfs-client --command="gsutil -m -q rsync -r -d -J -x '^.*/test-logs/.*$' gs://deephaven-benchmark /nfs/deephaven-benchmark"
- name: Sync GCloud with Demo NFS
run: |
gcloud compute ssh --zone "us-central1-a" --project "deephaven-oss" dhc-demo-nfs-client --command="gsutil -m -q rsync -r -d -J -x '^.*/test-logs/.*$' gs://deephaven-benchmark /nfs/deephaven-benchmark"
- name: Publish Slack Notification Tables
run: |
${SD}/run-publish-local.sh ${RUN_TYPE} "${{secrets.BENCHMARK_SLACK_CHANNEL}}" "${{secrets.BENCHMARK_SLACK_TOKEN}}"
- name: Archive Results
uses: actions/upload-artifact@v3
with:
Expand All @@ -91,3 +119,4 @@ jobs:
${{env.RUN_TYPE}}/**/*
logs/**/*
2 changes: 1 addition & 1 deletion docs/BenchmarkDemo.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ with [ChartBuilder](https://deephaven.io/core/docs/how-to-guides/user-interface/
or experiment with [the scripted UI](https://deephaven.io/core/docs/how-to-guides/plotting/category/).

```python
from urllib.request import urlopen
from urllib.request import urlopen; import os

root = 'file:///nfs' if os.path.exists('/nfs/deephaven-benchmark') else 'https://storage.googleapis.com'
with urlopen(root + '/deephaven-benchmark/benchmark_tables.dh.py') as r:
Expand Down
4 changes: 2 additions & 2 deletions docs/PublicSummaryWorkflow.md
Original file line number Diff line number Diff line change
Expand Up @@ -95,8 +95,8 @@ after a successful build.
- A GCP drive is created.
- Your benchmark process transfers the data to the GCP drive using a mechanism such as gsutil rsync, or by mounting the drive into the machine running the tests and writing directly.
- The demo system will create a Kube persistent volume that refers to the GCP drive.
- When a worker is spun up, it mounts the Kube persistent volume so that it can be seen locally on a path such as /data/benchmark
- A script or jupyter file can then access the data via the mounted /data/benchmark path.
- When a worker is spun up, it mounts the Kube persistent volume so that it can be seen locally on a path such as /nfs/benchmark
- A script or jupyter file can then access the data via the mounted /nfs/benchmark path.

Resources:
- https://devopscube.com/persistent-volume-google-kubernetes-engine/
Expand Down
2 changes: 1 addition & 1 deletion docs/PublishedResults.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ The easiest way to access the published benchmark results is by running the foll
in an instance of the Deephaven Engine.

````
from urllib.request import urlopen
from urllib.request import urlopen; import os
root = 'file:///nfs' if os.path.exists('/nfs/deephaven-benchmark') else 'https://storage.googleapis.com'
with urlopen(root + '/deephaven-benchmark/benchmark_tables.dh.py') as r:
Expand Down
4 changes: 2 additions & 2 deletions src/main/java/io/deephaven/benchmark/api/Bench.java
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
* sets of test packages to run on different systems simultaneously.
*/
final public class Bench {
static final public String rootOutputDir = "results";
static final public Path rootOutputDir = Paths.get("results");
static final public String resultFileName = "benchmark-results.csv";
static final public String metricsFileName = "benchmark-metrics.csv";
static final public String platformFileName = "benchmark-platform.csv";
Expand Down Expand Up @@ -233,7 +233,7 @@ <T extends Future<Metrics>> T addFuture(T future) {
static private Path initializeOutputDirectory() {
setSystemProperties();
boolean isTimestamped = profile.propertyAsBoolean("timestamp.test.results", "false");
Path dir = Paths.get(rootOutputDir);
Path dir = rootOutputDir;
if (isTimestamped)
dir = dir.resolve(Ids.runId());
Filer.delete(dir);
Expand Down
118 changes: 109 additions & 9 deletions src/main/java/io/deephaven/benchmark/connect/CachedResultTable.java
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,43 @@
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import io.deephaven.benchmark.util.Dates;
import io.deephaven.benchmark.util.Numbers;
import io.deephaven.engine.rowset.RowSet;
import io.deephaven.engine.table.Table;

/**
* Create an in-memory table from either CSV or a Deephaven table. Provides some basic accessors for getting column
* values. No data typing is done on import of the data. Use typed methods like {@code getNumber()} to convert from
* whatever row value came from the import.
* <p/>
* Note: This class is not a general purpose class for reading CSV or Deephaven Table data. It fits specific cases used
* by the Benchmark framework.
*/
public class CachedResultTable implements ResultTable {

/**
* Create an in-memory table instance from basic CSV. Does not handle quotes and is mainly used for testing. Skips
* any lines that do not have the same items as the header and trims all row items. No attempt is made to determine
* data types.
*
* @param csv basic csv with a header and columns
* @param delim the delimiter separating column values on each line
* @return a cached result table instance
*/
static public ResultTable create(String csv, String delim) {
var importer = new CsvImporter(csv, delim);
return new CachedResultTable(importer.columns(), importer.rows());
int minRowColumnCount = importer.columns().size();
return new CachedResultTable(importer.columns(), importer.rows(minRowColumnCount));
}

/**
* Create an in-memory table instance from a Deephaven Engine Table. Whatever datatype is read from the Table is
* stored in this cache.
*
* @param table a Deephaven table (likely procured from a subscription)
* @return a cached result table
*/
static public ResultTable create(Table table) {
var importer = new EngineTableImporter(table);
return new CachedResultTable(importer.columns(), importer.rows());
Expand All @@ -31,7 +57,7 @@ static public ResultTable create(Table table) {
}

public List<String> getColumnNames() {
return Collections.unmodifiableList(columns);
return getNonFormatColumns(columns);
}

public int getRowCount() {
Expand All @@ -41,7 +67,19 @@ public int getRowCount() {
public Object getValue(int rowIndex, String columnName) {
if (rowIndex >= rows.size())
return null;
return rows.get(rowIndex).get(getColumnIndex(columnName));
var value = rows.get(rowIndex).get(getColumnIndex(columnName));
var regex = columnName + "__.*_FORMAT";
var formatNames = columns.stream().filter(c -> c.matches(regex)).toList();
if (!formatNames.isEmpty()) {
var formatVal = rows.get(rowIndex).get(getColumnIndex(formatNames.get(0)));
var formatName = formatNames.get(0).replace(columnName + "__", "");
value = formatValue(value, formatName, formatVal.toString());
}
return value;
}

/**
 * Return the values of the given row for every requested column that is not a
 * format column, applying any configured column formatting to each value.
 */
public List<Object> getRow(int rowIndex, List<String> columnNames) {
    var values = new ArrayList<Object>();
    for (var column : getNonFormatColumns(columnNames))
        values.add(getValue(rowIndex, column));
    return Collections.unmodifiableList(values);
}

public Number getNumber(int rowIndex, String columnName) {
Expand All @@ -53,8 +91,7 @@ public ResultTable findRows(String columnName, Object value) {
var matchedRows = new ArrayList<List<Object>>();

int index = getColumnIndex(columnName);
for (int i = 0, n = rows.size(); i < n; i++) {
List<Object> row = rows.get(i);
for (List<Object> row : rows) {
if (row.size() <= index)
continue;
if (row.get(index).equals(value))
Expand All @@ -68,13 +105,78 @@ public Number getSum(String columnName) {
return rows.stream().mapToDouble(row -> Double.parseDouble(row.get(index).toString())).sum();
}

/**
 * Return the Csv representation of this table with no column alignment applied.
 *
 * @param delim the delimiter to place between column values
 * @return a csv string with a header line followed by one line per row
 */
public String toCsv(String delim) {
    return toCsv(delim, null);
}

/**
 * Return the Csv representation of this table, padding each column according to
 * the given alignment description ('L' or 'R' per column; null means no padding).
 *
 * @param delim the delimiter to place between column values
 * @param alignment a per-column justification string, or null for none
 * @return a csv string with a header line followed by one line per row
 */
public String toCsv(String delim, String alignment) {
    var alignments = getColumnAlignments(alignment);
    var names = getColumnNames();
    // Build each data line separately, then join; the header always ends with a newline
    var rowLines = new ArrayList<String>();
    for (int r = 0, count = getRowCount(); r < count; r++)
        rowLines.add(String.join(delim, applyAlignments(getRow(r, names), alignments)));
    var header = String.join(delim, applyAlignments(names, alignments));
    return header + '\n' + String.join("\n", rowLines);
}

/**
 * Apply the named table format to the given value.
 *
 * @param value the raw cell value
 * @param formatName the format kind taken from the format column's name
 * @param formatValue the format pattern to apply
 * @return the formatted value
 * @throws RuntimeException if the format kind is not recognized
 */
private String formatValue(Object value, String formatName, String formatValue) {
    switch (formatName) {
        case "TABLE_DATE_FORMAT":
            return Dates.formatDate(value, formatValue);
        case "TABLE_NUMBER_FORMAT":
            return Numbers.formatNumber(value, formatValue);
        default:
            throw new RuntimeException("Unsupported table format: " + formatName);
    }
}

/**
 * Return the position of the given column in this table's column list.
 *
 * @param columnName the column to look up
 * @return the zero-based column position
 * @throws RuntimeException if the column does not exist
 */
private int getColumnIndex(String columnName) {
    int position = columns.indexOf(columnName);
    if (position == -1)
        throw new RuntimeException("Undefined column name: " + columnName);
    return position;
}

/**
 * Return the given columns minus any internal format columns
 * (names matching {@code *__TABLE_*_FORMAT}).
 */
private List<String> getNonFormatColumns(List<String> columns) {
    var formatColumn = Pattern.compile(".*__TABLE_.*_FORMAT");
    var kept = new ArrayList<String>(columns.size());
    for (var name : columns) {
        if (!formatColumn.matcher(name).matches())
            kept.add(name);
    }
    return Collections.unmodifiableList(kept);
}

/**
 * Build per-column alignment metadata from an alignment description string.
 * Each character of the description ('L' or 'R') aligns the column at the same
 * position; columns beyond the description's length default to 'R'. The width
 * of each column is the widest of the header name and every (formatted) row
 * value, since getValue() applies any configured column format.
 *
 * @param alignDescr the per-column justification string, or null/blank for none
 * @return one Alignment per visible column, or null when no alignment was requested
 */
private List<Alignment> getColumnAlignments(String alignDescr) {
    if (alignDescr == null || alignDescr.isBlank())
        return null;
    var columns = getColumnNames();
    var alignments = new ArrayList<Alignment>(columns.size());
    for (int c = 0, cn = columns.size(); c < cn; c++) {
        // Default to right-justified when the description is shorter than the column list
        var alignChar = (c >= alignDescr.length()) ? 'R' : alignDescr.charAt(c);
        int maxWidth = columns.get(c).length();
        for (int i = 0, n = getRowCount(); i < n; i++) {
            // NOTE(review): assumes getValue never returns null for existing rows — confirm
            maxWidth = Math.max(getValue(i, columns.get(c)).toString().length(), maxWidth);
        }
        alignments.add(new Alignment(alignChar, maxWidth));
    }
    return alignments;
}

/**
 * Pad each row value to its column's width, left- or right-justified per the
 * alignment metadata. With no metadata, values are returned unpadded.
 *
 * @param row the header names or row values to pad
 * @param alignMeta one Alignment per column, or null/empty for no padding
 * @return the stringified, padded values
 */
private List<String> applyAlignments(List<?> row, List<Alignment> alignMeta) {
    if (alignMeta == null || alignMeta.isEmpty())
        return row.stream().map(c -> c.toString()).toList();
    var newRow = new ArrayList<String>(row.size());
    for (int i = 0, n = alignMeta.size(); i < n; i++) {
        var value = row.get(i).toString();
        var align = alignMeta.get(i);
        // Guard against a value wider than the recorded width: String.repeat
        // throws on a negative count, so clamp the padding to zero instead
        var space = " ".repeat(Math.max(0, align.width() - value.length()));
        value = (align.direction() == 'L') ? (value + space) : (space + value);
        newRow.add(value);
    }
    return newRow;
}

// Column alignment metadata: justification direction ('L' or 'R') and the
// padded width (the widest value in the column, including the header)
record Alignment(char direction, int width) {
}

static class CsvImporter {
final List<String> lines;
final String delim;
Expand All @@ -90,10 +192,8 @@ List<String> columns() {
return row(lines.get(0), delim).stream().map(s -> s.toString()).toList();
}

List<List<Object>> rows() {
if (lines.size() < 3)
return Collections.emptyList();
return lines.stream().skip(2).map(line -> row(line, delim)).toList();
List<List<Object>> rows(int minColumns) {
return lines.stream().skip(1).map(line -> row(line, delim)).filter(r -> r.size() >= minColumns).toList();
}

private List<Object> row(String line, String delim) {
Expand Down
30 changes: 30 additions & 0 deletions src/main/java/io/deephaven/benchmark/connect/ResultTable.java
Original file line number Diff line number Diff line change
Expand Up @@ -60,4 +60,34 @@ public interface ResultTable {
* @return all rows that match
*/
public ResultTable findRows(String columnName, Object value);

/**
 * Return values for the given columns in the order they are listed
 *
 * @param rowIndex the row position starting at 0
 * @param columnNames the column names to use for value retrieval
 * @return the column values for the row
 */
public List<Object> getRow(int rowIndex, List<String> columnNames);

/**
 * Return a string containing the Csv representation of this table.
 *
 * @param delim the delimiter to use between columns
 * @return a csv string
 */
public String toCsv(String delim);

/**
 * Return a string containing the Csv representation of this table. Allow setting column justification to right or
 * left depending on the 'R' or 'L' specified in the alignment description.
 * <p>
 * ex. "|", "LRRRRL" where there are 6 columns in the data
 *
 * @param delim the delimiter to use between columns
 * @param alignDescr a string of R's (right justified) and L's (left justified) for each column
 * @return a csv string
 */
public String toCsv(String delim, String alignDescr);

}
Loading

0 comments on commit 1d2c92b

Please sign in to comment.