
Commit

Merge branch 'dev' into chicago-rta-fares
ansoncfit committed Nov 27, 2023
2 parents 4403e8a + 1b322b4 commit 1649e89
Showing 4 changed files with 42 additions and 24 deletions.
21 changes: 14 additions & 7 deletions build.gradle
@@ -3,6 +3,7 @@ plugins {
id 'application'
id 'maven-publish'
id 'com.palantir.git-version' version '2.0.0'
id 'com.github.johnrengelman.shadow' version '8.1.1'
}

group = 'com.conveyal'
@@ -19,7 +20,7 @@ jar {
// For Java 11+ Modules, specify a module name.
// Do not create module-info.java until all our dependencies specify a module name.
// Main-Class BackendMain will start a local backend.
// Build-Jdk-Spec mimics a Maven manifest entry that helps us automatically install the right JVM.
// Build-Jdk-Spec mimics a Maven manifest entry that helps us automatically install or select the right JVM.
// Implementation-X attributes are needed for ImageIO (used by Geotools) to initialize in some environments.
manifest {
attributes 'Automatic-Module-Name': 'com.conveyal.r5',
@@ -31,6 +32,10 @@ jar {
}
}

shadowJar {
mergeServiceFiles()
}

// Allow reflective access by ObjectDiffer to normally closed Java internals. Used for round-trip testing serialization.
// IntelliJ seems not to pass these JVM arguments when running tests from within the IDE, so the Kryo serialization
// tests may only succeed under command line Gradle.
@@ -42,8 +47,8 @@ test {
'--add-opens=java.base/java.lang=ALL-UNNAMED']
}
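
A generic standalone sketch (hypothetical class, not from this repository) of the kind of deep reflection that these test JVM arguments enable. ObjectDiffer needs reflective access to normally closed Java internals, and on recent JDKs such access fails unless the package is opened to the unnamed module with the same --add-opens flag shown above.

import java.lang.reflect.Field;

public class AddOpensExample {
    public static void main (String[] args) throws Exception {
        // String.value is a private field inside the strongly encapsulated java.base module.
        Field value = String.class.getDeclaredField("value");
        // On recent JDKs this throws InaccessibleObjectException unless the JVM was started with
        // --add-opens=java.base/java.lang=ALL-UNNAMED (the flag passed to the test task above).
        value.setAccessible(true);
        System.out.println("java.lang opened for deep reflection");
    }
}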

// `gradle publish` will upload both shadow and simple JAR to Github Packages
// On GH Actions, GITHUB_ACTOR env variable is supplied without specifying it in action yml.
// Set up publication of jar files to GitHub Packages Maven repository.
// On GitHub Actions, GITHUB_ACTOR env variable is supplied without specifying it in action yml.
publishing {
repositories {
maven {
@@ -56,10 +61,12 @@ publishing {
}
}
publications {
// The presence of the shadow plugin somehow causes the shadow-jar to also be automatically included in this
// publication. Ideally we want to produce the shadow jar and upload it to S3 as a worker, but only publish the
// much smaller plain JAR without dependencies to Github Packages. On the other hand, we may want to publish
// shadow jars for tagged releases.
// The shadow plugin automatically creates and registers a component called "shadow" for integration with this
// Maven publish plugin. `gradle publish` will then upload both shadow jar and simple jar to Github Packages.
// See https://imperceptiblethoughts.com/shadow/getting-started/#default-java-groovy-tasks
// To run R5 with dependencies, Conveyal does not use shadow jars anymore, only the zip distribution or runBackend.
// For development builds and tests we don't need to produce a shadow jar, only publish the much smaller plain
// jar without dependencies to Github Packages. For now, we continue to attach shadow jars to tagged releases.
gpr(MavenPublication) {
from(components.java)
}
7 changes: 6 additions & 1 deletion CsvResultWriter.java
@@ -41,7 +41,12 @@ public abstract class CsvResultWriter extends BaseResultWriter implements Region
*/
public abstract CsvResultType resultType ();

/** Override to provide column names for this CSV writer. */
/**
* Override to provide column names for this CSV writer.
* NOTE: Due to Java weirdness, subclass implementations of this method will be called by the CsvResultWriter
* constructor at a time when fields of the subclass remain uninitialized, but uninitialized final primitive
* fields are still readable! Do not read subclass fields in these implementations until/unless this is restructured.
*/
protected abstract String[] columnHeaders ();

/** Override to extract row values from a single origin result. */
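
A minimal standalone sketch (hypothetical classes, not part of this commit) of the initialization-order issue the new note warns about: the superclass constructor invokes the overridden method before the subclass's own field initializers have run, so non-constant subclass fields still hold their default values at that point.

abstract class BaseWriter {
    BaseWriter () {
        // Called while the subclass is only partially constructed.
        System.out.println(String.join(",", columnHeaders()));
    }
    protected abstract String[] columnHeaders ();
}

class DensityWriter extends BaseWriter {
    // Initialized via a method call, so it is not a compile-time constant and still holds
    // the default value 0 while the superclass constructor is running.
    private final int threshold = defaultThreshold();
    private static int defaultThreshold () { return 120; }

    @Override
    protected String[] columnHeaders () {
        return new String[] { "D" + threshold };  // Yields "D0" here, not "D120".
    }

    public static void main (String[] args) {
        new DensityWriter();  // Prints "D0"
    }
}
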
26 changes: 15 additions & 11 deletions
@@ -31,15 +31,15 @@ public CsvResultType resultType () {
public String[] columnHeaders () {
List<String> headers = new ArrayList<>();
// The ids of the freeform origin point and destination set
headers.add("originId");
headers.add("destId");
headers.add("origin");
headers.add("destinations");
headers.add("percentile");
// The number of minutes needed to reach d destination opportunities
headers.add("dual");
// The opportunity density during each of 120 minutes
for (int m = 0; m < 120; m += 1) {
// The opportunity density over travel minute m
headers.add(Integer.toString(m));
}
// The number of minutes needed to reach d destination opportunities
headers.add("D" + dualThreshold);
return headers.toArray(new String[0]);
}

@@ -67,20 +67,24 @@ public Iterable<String[]> rowValues (RegionalWorkResult workResult) {
List<String> row = new ArrayList<>(125);
row.add(originId);
row.add(task.destinationPointSetKeys[d]);
row.add(Integer.toString(p));
// One density value for each of 120 minutes
row.add(Integer.toString(task.percentiles[p]));
// One column containing dual accessibility value
double[] densitiesPerMinute = percentilesForDestPointset[p];
for (int m = 0; m < 120; m++) {
row.add(Double.toString(densitiesPerMinute[m]));
}
// One dual accessibility value
int m = 0;
double sum = 0;
// Find smallest integer M such that we have already reached D destinations after M minutes of travel.
while (sum < dualThreshold && m < 120) {
sum += densitiesPerMinute[m];
m += 1;
}
// -1 indicates the threshold number of opportunities had still not been reached after the highest
// travel time cutoff specified in the analysis.
row.add(Integer.toString(m >= 120 ? -1 : m));
// One density value for each of 120 one-minute bins.
// Column labeled 10 contains the number of opportunities reached after 10 to 11 minutes of travel.
for (m = 0; m < 120; m++) {
row.add(Double.toString(densitiesPerMinute[m]));
}
rows.add(row.toArray(new String[row.size()]));
}
}
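
For reference, a standalone sketch (hypothetical class and example numbers) of the dual accessibility calculation the new rowValues code performs: find the smallest whole number of minutes M at which the cumulative opportunity count reaches the threshold D, reporting -1 when the threshold is still not reached within the 120-minute window.

public class DualAccessibilityExample {
    static int dualAccessibility (double[] densitiesPerMinute, double dualThreshold) {
        int m = 0;
        double sum = 0;
        // Find the smallest integer M such that D destinations are reached after M minutes of travel.
        while (sum < dualThreshold && m < 120) {
            sum += densitiesPerMinute[m];
            m += 1;
        }
        // -1 indicates the threshold was still not reached at the highest travel time cutoff.
        return (m >= 120) ? -1 : m;
    }

    public static void main (String[] args) {
        double[] densities = new double[120];
        // Assume a uniform 50 opportunities reached in each one-minute bin.
        java.util.Arrays.fill(densities, 50);
        System.out.println(dualAccessibility(densities, 1000));   // 20 minutes to reach 1000
        System.out.println(dualAccessibility(densities, 10_000)); // -1, never reached in 120 minutes
    }
}
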
12 changes: 7 additions & 5 deletions src/main/java/com/conveyal/r5/kryo/KryoNetworkSerializer.java
@@ -45,11 +45,13 @@ public abstract class KryoNetworkSerializer {
* the serialization format itself does not change. This will ensure newer workers will not load cached older files.
* We considered using an ISO date string as the version but that could get confusing when seen in filenames.
*
* History of Network Version (NV) changes:
* nv4 2023-11-02 WebMercatorGridPointSet now contains nested WebMercatorExtents
* nv3 2023-01-18 use Kryo 5 serialization format
* nv2 2022-04-05
* nv1 2021-04-30 stopped using r5 version string (which caused networks to be rebuilt for every new r5 version)
* History of Network Version (NV) changes (in production releases):
* nv3 since v7.0: switched to Kryo 5 serialization, WebMercatorGridPointSet now contains nested WebMercatorExtents
* nv2 since 2022-04-05
* nv1 since 2021-04-30: stopped rebuilding networks for every new r5 version, manually setting this version string
*
* When prototyping new features, use a unique identifier such as the branch or a commit ID, not sequential nvX ones.
* This avoids conflicts when multiple changes are combined in a single production release, or some are abandoned.
*/
public static final String NETWORK_FORMAT_VERSION = "nv3";

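A hypothetical sketch (not the actual KryoNetworkSerializer logic) of why bumping a format version string such as "nv3" keeps newer workers from loading cached files written in an older format: as the comment above suggests, the version appears in cached file names, so a worker built against one version never looks up files produced under another.

public class NetworkCacheNameExample {
    static final String NETWORK_FORMAT_VERSION = "nv3";

    // Hypothetical helper: derive a cache file name from a network ID and the format version.
    static String cacheFileName (String networkId) {
        return String.format("%s_%s.dat", networkId, NETWORK_FORMAT_VERSION);
    }

    public static void main (String[] args) {
        // A worker on this version requests "abc123_nv3.dat" and will never load a stale
        // "abc123_nv2.dat" produced by an older worker.
        System.out.println(cacheFileName("abc123"));
    }
}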
