From 23b3fb304ac306a6cea679cc3b1d4ce29f4a60dd Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Wed, 28 Jan 2026 11:04:04 -0800 Subject: [PATCH 01/13] initial down-dip implementation and FaultSection interface cleanup --- .../nshm26/DownDipInterfaceSubSectTests.java | 532 ++++++++++++++++++ .../scratch/kevin/ucerf3/PureScratch.java | 16 +- .../ETAS_ERF/testModels/TestModel1_FSS.java | 2 +- .../ETAS_ERF/testModels/TestModel2_FSS.java | 2 +- .../ETAS_ERF/testModels/TestModel3_FSS.java | 2 +- .../FaultSystemRuptureRateInversion.java | 20 +- .../SimpleFaultInversion.java | 6 +- .../SoSAF_SubSectionInversion.java | 3 +- .../ClassicSrcAnalysis/DoAnaylysis.java | 4 +- .../CreateRupturesFromSections.java | 31 +- .../OldCreateRupturesFromSections.java | 21 +- .../OLD_STUFF/OldSectionCluster.java | 57 +- .../ned/rupsInFaultSystem/SectionCluster.java | 61 +- .../CreateRupturesFromSections.java | 29 +- .../rupsInFaultSystem/SectionCluster.java | 55 +- 15 files changed, 697 insertions(+), 144 deletions(-) create mode 100644 src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java diff --git a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java new file mode 100644 index 00000000..8b8e20d9 --- /dev/null +++ b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java @@ -0,0 +1,532 @@ +package scratch.kevin.nshm26; + +import java.awt.Color; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.StringTokenizer; + +import org.apache.commons.math3.util.Precision; +import org.opensha.commons.geo.Location; +import org.opensha.commons.geo.LocationList; +import org.opensha.commons.geo.LocationUtils; +import org.opensha.commons.geo.Region; +import org.opensha.commons.gui.plot.GeographicMapMaker; +import 
org.opensha.commons.gui.plot.PlotCurveCharacterstics; +import org.opensha.commons.gui.plot.PlotLineType; +import org.opensha.commons.gui.plot.PlotSymbol; +import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files; +import org.opensha.commons.util.DataUtils; +import org.opensha.commons.util.DataUtils.MinMaxAveTracker; +import org.opensha.commons.util.FaultUtils; +import org.opensha.commons.util.cpt.CPT; +import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_SubductionScalingRelationships; +import org.opensha.sha.faultSurface.DownDipSubsectionBuilder; +import org.opensha.sha.faultSurface.FaultTrace; +import org.opensha.sha.faultSurface.GeoJSONFaultSection; + +import com.google.common.base.Preconditions; +import com.google.common.collect.Range; +import com.google.common.io.Files; + +import net.mahdilamb.colormap.Colors; + +public class DownDipInterfaceSubSectTests { + + public static void main(String[] args) throws IOException { + File inFile = new File("/tmp/ker_slab2_dep_10km_contours.xyz"); + String prefix = "ker_slab2"; + Range depthRange = Range.closed(0d, 60d); + Range lonFilter = null; + Range latFilter = Range.atLeast(-30d); + +// File inFile = new File("/tmp/izu_slab2_dep_10km_contours.xyz"); +// String prefix = "izu_slab2"; +// Range depthRange = Range.closed(0d, 60d); +// Range lonFilter = null; +// Range latFilter = Range.atMost(27d); + + File outputDir = inFile.getParentFile(); + + double scaleLength = 20d; + boolean scaleIsMax = false; + boolean constantCount = false; + + List rawContours = loadDepthContours(inFile); + + if (lonFilter != null || latFilter != null) + rawContours = filterContours(rawContours, latFilter, lonFilter); + + MinMaxAveTracker latRange = new MinMaxAveTracker(); + MinMaxAveTracker lonRange = new MinMaxAveTracker(); + for (FaultTrace trace : rawContours) { + for (Location loc : trace) { + latRange.addValue(loc.lat); + lonRange.addValue(loc.lon); + } + } + + Region mapReg = new Region(new 
Location(latRange.getMin()-1d, lonRange.getMin()-1d), + new Location(latRange.getMax()+1d, lonRange.getMax()+1d)); + GeographicMapMaker mapMaker = new GeographicMapMaker(mapReg); + + mapMaker.setWritePDFs(false); + + List lines = new ArrayList<>(); + List chars = new ArrayList<>(); + + PlotCurveCharacterstics rawChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 1f, Color.BLACK); + for (FaultTrace raw : rawContours) { + lines.add(raw); + chars.add(rawChar); + } + mapMaker.plotLines(lines, chars); + + mapMaker.plot(outputDir, prefix+"_raw", " "); + List> stitchedIndividual = new ArrayList<>(); + List depthContours = processContours(rawContours, stitchedIndividual); +// for (int d=0; d depthRange = Range.closedOpen(0d, 30d); + + GeoJSONFaultSection refSect = new GeoJSONFaultSection.Builder(0, "Test Fault", depthContours.get(0)) + .dip(90d).upperDepth(depthRange.lowerEndpoint()).lowerDepth(depthRange.upperEndpoint()) + .rake(90).build(); + + lines.clear(); + chars.clear(); + PlotCurveCharacterstics processedChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 1f, Color.BLACK); + for (FaultTrace processed : depthContours) { + lines.add(processed); + chars.add(processedChar); + } + PlotCurveCharacterstics connectorChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, Colors.tab_red); + for (List stitched : stitchedIndividual) { + for (int i=1; i startEndChars = new ArrayList<>(); + PlotCurveCharacterstics startChar = new PlotCurveCharacterstics(PlotSymbol.FILLED_SQUARE, 2f, Colors.tab_blue); + PlotCurveCharacterstics endChar = new PlotCurveCharacterstics(PlotSymbol.FILLED_CIRCLE, 2f, Colors.tab_green); + for (FaultTrace trace : depthContours) { + startEnds.add(trace.first()); + startEndChars.add(startChar); + } + for (FaultTrace trace : depthContours) { + startEnds.add(trace.last()); + startEndChars.add(endChar); + } + mapMaker.plotScatters(startEnds, startEndChars); + + mapMaker.plot(outputDir, prefix+"_contour_stitch", " "); + mapMaker.clearScatters(); + + 
lines.clear(); + chars.clear(); + + rawChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 1f, Color.LIGHT_GRAY); + for (FaultTrace raw : rawContours) { + lines.add(raw); + chars.add(rawChar); + } + + List interpTraces = DownDipSubsectionBuilder.interpolateDepthTraces( + depthContours, 2, scaleLength, scaleIsMax, depthRange); + System.out.println("Built "+interpTraces.size()+" interpolated traces"); + for (FaultTrace trace : interpTraces) + System.out.println("\t"+traceStr(trace)); + + PlotCurveCharacterstics interpChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 1.2f, Color.BLACK); + for (FaultTrace interp : interpTraces) { + lines.add(interp); + chars.add(interpChar); + } + + mapMaker.plot(outputDir, prefix+"_contour_interp", " "); + + GeoJSONFaultSection[][] subSects; + if (constantCount) + subSects = DownDipSubsectionBuilder.buildForConstantCountAlong( + refSect, interpTraces, 2, scaleLength, 0); + else + subSects = DownDipSubsectionBuilder.buildForConstantLength( + refSect, interpTraces, 2, scaleLength, scaleIsMax, 0); + + mapMaker.setFillSurfaces(true); + + List allSects = new ArrayList<>(); + MinMaxAveTracker lenRange = new MinMaxAveTracker(); + MinMaxAveTracker ddwRange = new MinMaxAveTracker(); + MinMaxAveTracker areaRange = new MinMaxAveTracker(); + MinMaxAveTracker magRange = new MinMaxAveTracker(); + MinMaxAveTracker dipRange = new MinMaxAveTracker(); + MinMaxAveTracker perRowRange = new MinMaxAveTracker(); + for (GeoJSONFaultSection[] row : subSects) { + perRowRange.addValue(row.length); + for (GeoJSONFaultSection sect : row) { + double length = sect.getTraceLength(); + double ddw = sect.getOrigDownDipWidth(); + double area = sect.getArea(false)*1e-6; + lenRange.addValue(length); + ddwRange.addValue(ddw); + areaRange.addValue(area); + magRange.addValue(PRVI25_SubductionScalingRelationships.LOGA_C4p0.getMag(area*1e6, length*1e3, ddw*1e3, ddw*1e3, 90d)); + dipRange.addValue(sect.getAveDip()); + allSects.add(sect); + } + } + 
mapMaker.setFaultSections(allSects); + + int sectMod = 20; + CPT cpt = GMT_CPT_Files.RAINBOW_UNIFORM.instance().rescale(0d, sectMod-1d); + + mapMaker.plotSectScalars(s -> {return (double)(s.getSubSectionIndex() % sectMod);}, + cpt, null); +// mapMaker.plotSectScalarsByIndex(I -> {return (double)(I % sectMod);}, +// cpt, "Subsection Index"); + + lines.clear(); + chars.clear(); + + rawChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 1f, Color.LIGHT_GRAY); + for (FaultTrace raw : rawContours) { + lines.add(raw); + chars.add(rawChar); + } + mapMaker.setPlotSectsOnTop(true); + +// System.out.println("First sect:\n" +// +allSects.get(0).toFeature().toJSON()+"\n"); +// System.out.println("Second sect:\n" +// +allSects.get(1).toFeature().toJSON()+"\n"); +// System.out.println("Last sect:\n" +// +allSects.get(allSects.size()-1).toFeature().toJSON()+"\n"); + + System.out.println("Sub-Sect Lenghts:\t"+lenRange); + System.out.println("Sub-Sect DDWs:\t"+ddwRange); + System.out.println("Sub-Sect Areas:\t"+areaRange); + System.out.println("Sub-Sect Mmin:\t"+magRange); + System.out.println("Sub-Sect Dip:\t"+dipRange); + System.out.println("Sub-Sect Row counts:\t"+perRowRange); + + mapMaker.plot(outputDir, prefix+"_sub_sects", " "); + } + + private static List processContours(List contours) { + return processContours(contours, null); + } + + private static List processContours(List contours, List> stitchedIndividual) { + // group by depth + List> depthGrouped = new ArrayList<>(); + double curDepth = Double.NaN; + List curTraces = null; + for (FaultTrace trace : contours) { + double depth = trace.first().depth; + if (curTraces == null || !Precision.equals(depth, curDepth, 0.1)) { + curTraces = new ArrayList<>(); + curDepth = depth; + depthGrouped.add(curTraces); + } + curTraces.add(trace); + } + + // sort by depth increasing + Collections.sort(depthGrouped, (o1, o2) -> Double.compare(o1.get(0).first().depth, o2.get(0).first().depth)); + + // figure out overall strike direction 
using the middle trace + List middleGroup = depthGrouped.get(depthGrouped.size()/2); + Location[] furthestPair = null; + double furthestDist = 0d; + if (middleGroup.size() == 1) { + FaultTrace trace = middleGroup.get(0); + furthestDist = LocationUtils.horzDistanceFast(trace.first(), trace.last()); + furthestPair = new Location[] { trace.first(), trace.last() }; + } else { + for (int i=0; i furthestDist) { + furthestDist = dist; + furthestPair = new Location[] { l1, l2 }; + } + } + } + } + } + } + // that gives us an overall strike direction, but could be reversed relative to the RHR + double strike = LocationUtils.azimuth(furthestPair[0], furthestPair[1]); + double[] downAzimuths = new double[depthGrouped.size()-1]; + Location upperMiddle = avgLocation(depthGrouped.get(0)); + for (int i=1; i stitched = new ArrayList<>(); + for (List depthTraces : depthGrouped) { + double depth = depthTraces.get(0).first().depth; + System.out.println(oDF.format(depth)+" km: "+depthTraces.size()+" contours"); + + if (depthTraces.size() == 1) { + // simple case + FaultTrace trace = depthTraces.get(0); + double az = trace.getStrikeDirection(); + boolean reverse = shouldReverse(strike, az, 70d); + System.out.println("\taz="+oDF.format(az)+", reverse="+reverse); + if (reverse) + trace = getReversed(trace); + stitched.add(trace); + continue; + } + + // find the most central trace and work out from there + Location centerLoc = avgLocation(depthTraces); + + int centralIndex = -1; + double centerDist = Double.POSITIVE_INFINITY; + + for (int i=0; i candidates = new ArrayList<>(depthTraces); + FaultTrace central = candidates.remove(centralIndex); + List stitchedList = new ArrayList<>(depthTraces.size()); + stitchedList.add(central); + Location curFirst = central.first(); + Location curLast = central.last(); + while (!candidates.isEmpty()) { + // find the closest location to either end + int closestIndex = -1; + boolean closestReversed = false; + boolean closestToFirst = false; + double minDist 
= Double.POSITIVE_INFINITY; + for (int i=0; i filterContours(List contours, Range latRange, Range lonRange) { + List ret = new ArrayList<>(); + + for (FaultTrace trace : contours) { + int firstInside = -1; + int lastInside = -1; + boolean allInside = true; + boolean noneInside = true; + for (int i=0; i loadDepthContours(File inFile) throws IOException { + List ret = new ArrayList<>(); + FaultTrace curTrace = null; + for (String line : Files.readLines(inFile, Charset.defaultCharset())) { + line = line.trim(); + if (line.startsWith(">")) { + // new contour + curTrace = new FaultTrace(); + ret.add(curTrace); + continue; + } + StringTokenizer tok = new StringTokenizer(line); + Preconditions.checkState(tok.countTokens() == 3); + double lon = Double.parseDouble(tok.nextToken()); + double lat = Double.parseDouble(tok.nextToken()); + double depth = -Double.parseDouble(tok.nextToken()); + curTrace.add(new Location(lat, lon, depth));; + } + Collections.reverse(ret); + return ret; + } + + private static final DecimalFormat twoDF = new DecimalFormat("0.00"); + private static final DecimalFormat oDF = new DecimalFormat("0.##"); + + private static String traceStr(FaultTrace trace) { + boolean resampled = false; + if (trace.size() > 5) { + trace = FaultUtils.resampleTrace(trace, 4); + resampled = true; + } + StringBuilder str = null; + for (Location loc : trace) { + if (str == null) + str = new StringBuilder(); + else + str.append(" "); + str.append("[").append(twoDF.format(loc.lat)).append(", ").append(twoDF.format(loc.lon)) + .append(", ").append(oDF.format(loc.depth)).append("]"); + } + str.append("; strike=").append(oDF.format(LocationUtils.azimuth(trace.first(), trace.last()))); + if (resampled) + str.append(" (resampled)"); + return str.toString(); + } + + private static Location avgLocation(LocationList locs) { + return avgLocation(List.of(locs)); + } + + private static Location avgLocation(List locLists) { + double sumLat = 0d; + double sumLon = 0d; + double sumDepth = 
0d; + int count = 0; + for (LocationList list : locLists) { + for (Location loc : list) { + count++; + sumLat += loc.lat; + sumLon += loc.lon; + sumDepth += loc.depth; + } + } + return new Location(sumLat/(double)count, sumLon/(double)count, sumDepth/(double)count); + } + + private static final double AZ_TOL_DEFAULT = 50d; + + private static FaultTrace getOriented(FaultTrace trace, double overallStrike) { + double traceStrike = trace.getStrikeDirection(); + if (shouldReverse(overallStrike, traceStrike, AZ_TOL_DEFAULT)) + return getReversed(trace); + return trace; + } + + private static boolean shouldReverse(double refStrike, double testStrike) { + return shouldReverse(refStrike, testStrike, AZ_TOL_DEFAULT); + } + + private static boolean shouldReverse(double refStrike, double testStrike, double tolerance) { + double diff = FaultUtils.getAbsAngleDiff(refStrike, testStrike); + Preconditions.checkState(diff >= 0d && diff <= 180d, "bad diff=%s for (%s - %s)", diff, refStrike, testStrike); + if (diff <= tolerance) + return false; + if (diff > 180d-tolerance) + return true; + throw new IllegalStateException("Couldn't determine if the trace should be reversed: ref=" + +oDF.format(refStrike)+", dest="+oDF.format(testStrike)+", diff="+oDF.format(diff)); + } + + private static FaultTrace getReversed(FaultTrace trace) { + FaultTrace reversed = new FaultTrace(trace.getName(), trace.size()); + for (int i=trace.size(); --i>=0;) + reversed.add(trace.get(i)); + return reversed; + } + +} \ No newline at end of file diff --git a/src/main/java/scratch/kevin/ucerf3/PureScratch.java b/src/main/java/scratch/kevin/ucerf3/PureScratch.java index b0a3a9e6..4662a967 100644 --- a/src/main/java/scratch/kevin/ucerf3/PureScratch.java +++ b/src/main/java/scratch/kevin/ucerf3/PureScratch.java @@ -136,6 +136,7 @@ import org.opensha.sha.earthquake.faultSysSolution.modules.PolygonFaultGridAssociations; import org.opensha.sha.earthquake.faultSysSolution.modules.RegionsOfInterest; import 
org.opensha.sha.earthquake.faultSysSolution.modules.RupMFDsModule; +import org.opensha.sha.earthquake.faultSysSolution.modules.SectAreas; import org.opensha.sha.earthquake.faultSysSolution.modules.SolutionLogicTree; import org.opensha.sha.earthquake.faultSysSolution.modules.SolutionSlipRates; import org.opensha.sha.earthquake.faultSysSolution.modules.TrueMeanRuptureMappings; @@ -3407,13 +3408,26 @@ private static void test349() throws IOException { visible.addParameter(param); } + private static void test350() throws IOException { + FaultSystemRupSet rs = FaultSystemRupSet.load(new File("/home/kevin/OpenSHA/nshm23/batch_inversions/" + + "2024_02_02-nshm23_branches-WUS_FM_v3/results_WUS_FM_v3_branch_averaged.zip")); + + SectAreas areas = rs.requireModule(SectAreas.class); + System.out.println("Areas is of type: "+areas.getClass()); + areas = SectAreas.precomputed(rs, areas.getSectAreas()); + rs.addModule(areas); + File outDir = new File("/tmp/rsout"); + Preconditions.checkState(outDir.exists() || outDir.mkdir()); + rs.write(outDir); + } + /** * @param args * @throws Exception */ public static void main(String[] args) throws Exception { try { - test348(); + test350(); } catch (Throwable t) { t.printStackTrace(); System.exit(1); diff --git a/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel1_FSS.java b/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel1_FSS.java index 081639ae..7ff6551d 100644 --- a/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel1_FSS.java +++ b/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel1_FSS.java @@ -52,7 +52,7 @@ public class TestModel1_FSS extends InversionFaultSystemSolution { int totNumRups=1653; // found by computing once - ArrayList subSectionData; + List subSectionData; double[] rateForRup = new double[totNumRups]; double[] magForRup = new double[totNumRups]; double[] areaForRup = new double[totNumRups]; // square-meters diff --git a/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel2_FSS.java 
b/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel2_FSS.java index 71f0303f..68d6cb6b 100644 --- a/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel2_FSS.java +++ b/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel2_FSS.java @@ -55,7 +55,7 @@ public class TestModel2_FSS extends U3FaultSystemSolution { int totNumRups; // found by computing once - ArrayList subSectionData; + List subSectionData; double[] rateForRup; double[] magForRup; double[] areaForRup; // square-meters diff --git a/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel3_FSS.java b/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel3_FSS.java index 65bdd34f..f0075d9f 100644 --- a/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel3_FSS.java +++ b/src/main/java/scratch/ned/ETAS_ERF/testModels/TestModel3_FSS.java @@ -58,7 +58,7 @@ public class TestModel3_FSS extends U3FaultSystemSolution { int totNumRups; - ArrayList subSectionData; + List subSectionData; double[] rateForRup; double[] magForRup; double[] areaForRup; // square-meters diff --git a/src/main/java/scratch/ned/FSS_Inversion2019/FaultSystemRuptureRateInversion.java b/src/main/java/scratch/ned/FSS_Inversion2019/FaultSystemRuptureRateInversion.java index 795f8911..03bea493 100644 --- a/src/main/java/scratch/ned/FSS_Inversion2019/FaultSystemRuptureRateInversion.java +++ b/src/main/java/scratch/ned/FSS_Inversion2019/FaultSystemRuptureRateInversion.java @@ -115,8 +115,8 @@ public class FaultSystemRuptureRateInversion { String modelSetUpInfoString, modelRunInfoString; // input data - private ArrayList fltSectionDataList; - private ArrayList sectionRateConstraints; // using old class for "segments" + private List fltSectionDataList; + private List sectionRateConstraints; // using old class for "segments" private int[][] rupSectionMatrix; // section attributes @@ -158,11 +158,11 @@ public class FaultSystemRuptureRateInversion { // segmentation (rup rate = 0) constraints int num_segConstraints; - ArrayList 
segmentationConstraintList; - ArrayList slipRateSegmentationConstraintList; + List segmentationConstraintList; + List slipRateSegmentationConstraintList; // the following specifies fault section indices where a Laplacian smoothness constraint should be applied - ArrayList smoothnessConstraintList; + List smoothnessConstraintList; @@ -497,12 +497,12 @@ public void writeInversionSetUpInfoToFile(String dirName) { */ public FaultSystemRuptureRateInversion( String modelName, String slipRateModelName, - ArrayList fltSectionDataList, + List fltSectionDataList, int[][] rupSectionMatrix, SlipAlongRuptureModelEnum slipModelType, ScalingRelationshipEnum scalingRel, - ArrayList slipRateSegmentationConstraintList, - ArrayList sectionRateConstraints, + List slipRateSegmentationConstraintList, + List sectionRateConstraints, double relativeSectRateWt, double relative_aPrioriRupWt, String aPrioriRupRatesFilename, @@ -513,12 +513,12 @@ public FaultSystemRuptureRateInversion( String modelName, IncrementalMagFreqDist mfdConstraint, IncrementalMagFreqDist mfdSigma, // uncertainty of the MFD (1 sigma) double relativeMFD_constraintWt, - ArrayList segmentationConstraintList, + List segmentationConstraintList, double relative_segConstraintWt, double totalRateConstraint, double totalRateSigma, double relativeTotalRateConstraintWt, - ArrayList smoothnessConstraintList, + List smoothnessConstraintList, double relativeSmoothnessConstraintWt, double magAareaAleatoryVariability) { diff --git a/src/main/java/scratch/ned/FSS_Inversion2019/SimpleFaultInversion.java b/src/main/java/scratch/ned/FSS_Inversion2019/SimpleFaultInversion.java index 80bfb070..10f7f847 100644 --- a/src/main/java/scratch/ned/FSS_Inversion2019/SimpleFaultInversion.java +++ b/src/main/java/scratch/ned/FSS_Inversion2019/SimpleFaultInversion.java @@ -119,7 +119,7 @@ public class SimpleFaultInversion { final static double hazGridSpacing = 0.05; - ArrayList faultSectionDataList; + List faultSectionDataList; int[][] 
rupSectionMatrix; int numSections, numRuptures; @@ -501,7 +501,7 @@ else if(NUM_SUBSECT_PER_RUP == 4) { // System.exit(-1); } - public ArrayList getFaultSectionDataList() { + public List getFaultSectionDataList() { return faultSectionDataList; } @@ -1919,7 +1919,7 @@ public FaultSystemRuptureRateInversion getSolution(boolean rePlotOny) { System.out.println(mfdTargetType+" total rate "+targetMFD.getTotalIncrRate()); } - ArrayList fltSectDataList = getFaultSectionDataList(); + List fltSectDataList = getFaultSectionDataList(); int[][] rupSectionMatrix = getRupSectionMatrix(); // this will be used to keep track of runtimes diff --git a/src/main/java/scratch/ned/SSAF_Inversion/SoSAF_SubSectionInversion.java b/src/main/java/scratch/ned/SSAF_Inversion/SoSAF_SubSectionInversion.java index b03aded6..d962cd9d 100644 --- a/src/main/java/scratch/ned/SSAF_Inversion/SoSAF_SubSectionInversion.java +++ b/src/main/java/scratch/ned/SSAF_Inversion/SoSAF_SubSectionInversion.java @@ -9,6 +9,7 @@ import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; +import java.util.List; import org.apache.poi.hssf.usermodel.HSSFCell; import org.apache.poi.hssf.usermodel.HSSFRow; @@ -954,7 +955,7 @@ else if(deformationModel.equals("D2.3")) for(int i=0; i faultSectionDataList = faultData.getSubSectionsList(ddw/2.0+0.01); // add a bit to get the correct number of subsections + List faultSectionDataList = faultData.getSubSectionsList(ddw/2.0+0.01); // add a bit to get the correct number of subsections // System.out.println(faultSectionDataList.get(0).toString()); // System.exit(0); @@ -799,7 +799,7 @@ public static FaultSystemRuptureRateInversion getSA_Solution(IncrementalMagFreqD mfdSigma.scale(0.1); // uncertainty is 10% } - ArrayList faultSectionDataList = faultData.getSubSectionsList(ddw/NUM_SUBSECT_PER_RUP+0.01); // add a bit to get the correct number of subsections + List faultSectionDataList = faultData.getSubSectionsList(ddw/NUM_SUBSECT_PER_RUP+0.01); // add a bit to 
get the correct number of subsections // set all the section ids and compute length double lengthSum = 0; diff --git a/src/main/java/scratch/ned/rupsInFaultSystem/CreateRupturesFromSections.java b/src/main/java/scratch/ned/rupsInFaultSystem/CreateRupturesFromSections.java index ce642ccd..bd4c235a 100644 --- a/src/main/java/scratch/ned/rupsInFaultSystem/CreateRupturesFromSections.java +++ b/src/main/java/scratch/ned/rupsInFaultSystem/CreateRupturesFromSections.java @@ -10,6 +10,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; +import java.util.List; import org.opensha.commons.data.NamedComparator; import org.opensha.commons.geo.Location; @@ -25,15 +26,15 @@ public class CreateRupturesFromSections { protected final static boolean D = false; // for debugging - ArrayList allFaultSectionPrefData; + List allFaultSectionPrefData; double subSectionDistances[][],subSectionAzimuths[][];; String endPointNames[]; Location endPointLocs[]; int numSections, numSubSections, minNumSubSectInRup; - ArrayList> subSectionConnectionsListList, endToEndSectLinksList; + List> subSectionConnectionsListList, endToEndSectLinksList; double maxJumpDist, maxAzimuthChange, maxTotAzimuthChange, maxSubSectionLength; - ArrayList> subSectionPrefDataListList; - ArrayList subSectionPrefDataList; // same as above, but a sequential list (not list of lists) + List> subSectionPrefDataListList; + List subSectionPrefDataList; // same as above, but a sequential list (not list of lists) // this is to store the section and subsection indices for the ith subsection. 
int[] sectForSubSectionMapping,subSectForSubSectMapping, firstSubsectOfSectMapping; @@ -150,10 +151,10 @@ public SectionCluster getCluster(int clusterIndex) { - public ArrayList> getRupList() { - ArrayList> rupList = new ArrayList>(); + public List> getRupList() { + List> rupList = new ArrayList<>(); for(int i=0; i>(); + subSectionPrefDataListList = new ArrayList>(); subSectionPrefDataList = new ArrayList(); numSubSections=0; numSections = allFaultSectionPrefData.size(); @@ -284,7 +285,7 @@ private void createSubSections(boolean includeSectionsWithNaN_slipRates) { for(int i=0; i subSectData = faultSectionPrefData.getSubSectionsList(maxSectLength); + List subSectData = faultSectionPrefData.getSubSectionsList(maxSectLength); // if(subSectData.size()>maxNumSubSections) maxNumSubSections = numSubSections; // this was a bug - fixed below if(subSectData.size()>maxNumSubSections) maxNumSubSections = subSectData.size(); numSubSections += subSectData.size(); @@ -476,7 +477,7 @@ private void calcSubSectionAzimuths() { */ private void computeCloseSubSectionsListList() { - subSectionConnectionsListList = new ArrayList>(); + subSectionConnectionsListList = new ArrayList>(); // first add the adjacent subsections in that section for(int i=0; i sect1_List = subSectionPrefDataListList.get(i); + List sect1_List = subSectionPrefDataListList.get(i); for(int j=i+1; j sect2_List = subSectionPrefDataListList.get(j); + List sect2_List = subSectionPrefDataListList.get(j); double minDist=Double.MAX_VALUE; int subSectIndex1 = -1; int subSectIndex2 = -1; @@ -525,7 +526,7 @@ private void computeCloseSubSectionsListList() { private void makeClusterList() { // make an arrayList of subsection integers - ArrayList availableSubSections = new ArrayList(); + List availableSubSections = new ArrayList(); for(int i=0; i(); @@ -548,7 +549,7 @@ private void makeClusterList() { private void addLinks(int subSectIndex, SectionCluster list) { - ArrayList branches = 
subSectionConnectionsListList.get(subSectIndex); + List branches = subSectionConnectionsListList.get(subSectIndex); for(int i=0; i sectList = subSectionConnectionsListList.get(sIndex1); + List sectList = subSectionConnectionsListList.get(sIndex1); outputString += "\n"+subSectionPrefDataList.get(sIndex1).getName() + " connections:\n\n"; for(int i=0;i ArrayList> sectionConnectionsList, endToEndSectLinksList; ArrayList sectionClusterList; double maxJumpDist, maxAngle, maxTotStrikeChange, maxSubSectionLength; - ArrayList> subSectionPrefDataList; + List> subSectionPrefDataList; /** @@ -86,8 +87,8 @@ public OldSectionCluster getCluster(int clusterIndex) { - public ArrayList> getRupList() { - ArrayList> rupList = new ArrayList>(); + public List> getRupList() { + List> rupList = new ArrayList<>(); for(int i=0; i>(); + subSectionPrefDataList = new ArrayList<>(); numSubSections=0; numSections = allFaultSectionPrefData.size(); for(int i=0; i subSectData = faultSectionPrefData.getSubSectionsList(maxSubSectionLength); + List subSectData = faultSectionPrefData.getSubSectionsList(maxSubSectionLength); numSubSections += subSectData.size(); subSectionPrefDataList.add(subSectData); } @@ -324,7 +325,7 @@ private void computeEndToEndSectLinksList() { private void computeSectionClusters() { sectionClusterList = new ArrayList(); // duplicate the following because it will get modified - ArrayList> tempEndToEndSectList = (ArrayList>)endToEndSectLinksList.clone(); + List> tempEndToEndSectList = new ArrayList<>(endToEndSectLinksList); int clusterCounter=0; while(tempEndToEndSectList.size() > 0) { @@ -349,7 +350,7 @@ private void testClusters() { int sum =0; for(int j=0;j indices = sectionClusterList.get(j).getSectionIndicesInCluster(); + List indices = sectionClusterList.get(j).getSectionIndicesInCluster(); // System.out.println(indices); if(indices.contains(Integer.valueOf(i))) sum+=1; @@ -487,7 +488,7 @@ public void plotAllTraces(double maxDist, double minAngle, double maxAngle) { // Note 
that this produces erroneous zig-zag plot for traces that have multiple lats for a given lon // (functions force x values to monotonically increase) - public void plotAllSections(ArrayList link) { + public void plotAllSections(List link) { ArbitrarilyDiscretizedFunc func = new ArbitrarilyDiscretizedFunc(); double minLat=1000, maxLat=-1000, minLon=1000, maxLon=-1000; String name = new String(); @@ -522,7 +523,7 @@ public void plotAllEndToEndLinks() { public void plotAllClusters() { for(int i=0;i indices = sectionClusterList.get(i).getSectionIndicesInCluster(); + List indices = sectionClusterList.get(i).getSectionIndicesInCluster(); if(indices.size()>6) plotAllSections(indices); } @@ -689,7 +690,7 @@ private double reverseAzimuth(double azimuth) { public static void main(String[] args) { long startTime=System.currentTimeMillis(); OldCreateRupturesFromSections createRups = new OldCreateRupturesFromSections(10, 45, 60, 7, 2); - ArrayList> rupList = createRups.getRupList(); + List> rupList = createRups.getRupList(); System.out.println(rupList.get(0)); int runtime = (int)(System.currentTimeMillis()-startTime)/1000; System.out.println("Run took "+runtime+" seconds"); diff --git a/src/main/java/scratch/ned/rupsInFaultSystem/OLD_STUFF/OldSectionCluster.java b/src/main/java/scratch/ned/rupsInFaultSystem/OLD_STUFF/OldSectionCluster.java index 50154465..fb61f3c7 100644 --- a/src/main/java/scratch/ned/rupsInFaultSystem/OLD_STUFF/OldSectionCluster.java +++ b/src/main/java/scratch/ned/rupsInFaultSystem/OLD_STUFF/OldSectionCluster.java @@ -2,6 +2,7 @@ import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import org.opensha.refFaultParamDb.vo.FaultSectionPrefData; @@ -16,21 +17,21 @@ */ public class OldSectionCluster { - ArrayList> localEndToEndSectLinksList; - ArrayList allSectEndPtsInCluster; - ArrayList> subSectionPrefDataList; - ArrayList allSubSectionsIdList; - ArrayList> rupList; + List> localEndToEndSectLinksList; + List allSectEndPtsInCluster; + 
List> subSectionPrefDataList; + List allSubSectionsIdList; + List> rupList; int minNumSubSectInRup; /** * * @return */ - public ArrayList getSectionIndicesInCluster() { - ArrayList sectIndices = new ArrayList(); + public List getSectionIndicesInCluster() { + List sectIndices = new ArrayList(); for(int i=0; i endToEndLink = localEndToEndSectLinksList.get(i); + List endToEndLink = localEndToEndSectLinksList.get(i); // System.out.println("endToEndLink="+endToEndLink); for(int j=0; j< endToEndLink.size(); j++) { Integer linkInt = endToEndLink.get(j); @@ -48,14 +49,14 @@ public ArrayList getSectionIndicesInCluster() { * @param endToEndSectLinksList * @return */ - public ArrayList> CreateCluster(ArrayList> endToEndSectLinksList, - ArrayList> subSectionPrefDataList, int minNumSubSectInRup) { + public List> CreateCluster(List> endToEndSectLinksList, + List> subSectionPrefDataList, int minNumSubSectInRup) { this.subSectionPrefDataList = subSectionPrefDataList; this.minNumSubSectInRup = minNumSubSectInRup; // make hashmap of endToEndSectLinksList to make it easier to remove those used from what's passed back - HashMap> endToEndLinkdListHashMap = new HashMap>(); + HashMap> endToEndLinkdListHashMap = new HashMap<>(); for(int i=0;i> CreateCluster(ArrayList> for(int i=1; i endToEndLink = endToEndSectLinksList.get(j); + List endToEndLink = endToEndSectLinksList.get(j); for(int k=0; k> CreateCluster(ArrayList> } } - localEndToEndSectLinksList = new ArrayList>(); + localEndToEndSectLinksList = new ArrayList<>(); for(int i=0; i endToEndLink = localEndToEndSectLinksList.get(l); - ArrayList endToEndSubsectionIDs = new ArrayList(); + List endToEndLink = localEndToEndSectLinksList.get(l); + List endToEndSubsectionIDs = new ArrayList(); for(int i=0; i< endToEndLink.size();i+=2) { sectionIndex = endToEndLink.get(i).intValue()/2; // convert from endpoint index to section index - ArrayList prefSubsectDataForSection = subSectionPrefDataList.get(sectionIndex); + List prefSubsectDataForSection 
= subSectionPrefDataList.get(sectionIndex); for(int k=0; k< prefSubsectDataForSection.size();k++) { endToEndSubsectionIDs.add(prefSubsectDataForSection.get(k).getSectionId()); } @@ -193,7 +194,7 @@ private void computeRupList() { for(int numSubSectInRup=minNumSubSectInRup;numSubSectInRup<=endToEndSubsectionIDs.size();numSubSectInRup++) { for(int s=0;s lastRup = rupListIndices.get(rupListIndices.size()-1); + List lastRup = rupListIndices.get(rupListIndices.size()-1); // System.out.println("lastRup.get(lastRup.size()-1) = " + lastRup.get(lastRup.size()-1)); // System.out.println("lastSubSect = " + lastSubSect); continue; @@ -147,7 +148,7 @@ private void addRuptures(ArrayList list) { } } - ArrayList newList = (ArrayList)list.clone(); + List newList = new ArrayList<>(list); newList.add(newSubSect); if(newList.size() >= minNumSubSectInRup) {// it's a rupture rupListIndices.add(newList); @@ -171,7 +172,7 @@ private void addRuptures(ArrayList list) { private void computeRupList() { System.out.println("Cluster: "+this); - rupListIndices = new ArrayList>(); + rupListIndices = new ArrayList>(); // loop over every subsection as the first in the rupture int progress = 0; int progressIncrement = 5; @@ -186,7 +187,7 @@ private void computeRupList() { System.out.print(progress+"\t"); progress += progressIncrement; } - ArrayList subSectList = new ArrayList(); + List subSectList = new ArrayList(); int subSectIndex = get(s); subSectList.add(subSectIndex); addRuptures(subSectList); @@ -195,10 +196,10 @@ private void computeRupList() { System.out.print("\n"); // now filter out duplicates (which would exist in reverse order) & change from containing indices to IDs - ArrayList> newRupList = new ArrayList>(); + List> newRupList = new ArrayList>(); for(int r=0; r< rupListIndices.size();r++) { - ArrayList rup = rupListIndices.get(r); - ArrayList reverseRup = new ArrayList(); + List rup = rupListIndices.get(r); + List reverseRup = new ArrayList(); for(int i=rup.size()-1;i>=0;i--) 
reverseRup.add(rup.get(i)); if(!newRupList.contains(reverseRup)) { // keep if we don't already have newRupList.add(rup); @@ -211,11 +212,11 @@ private void computeRupList() { // Remove ruptures where subsection strikes have too big a spread double MAXstrikeDiff = 90; //maximum allowed difference in strikes between any two subsections in the same rupture, in degrees - ArrayList> toRemove = new ArrayList>(); + List> toRemove = new ArrayList>(); for(int r=0; r< numRupsAdded;r++) { - ArrayList rup = rupListIndices.get(r); + List rup = rupListIndices.get(r); //System.out.println("rup = " + rup); - ArrayList strikes = new ArrayList(rup.size()); + List strikes = new ArrayList(rup.size()); for (int i=0; i allFaultSectionPrefData; + List allFaultSectionPrefData; double subSectionDistances[][],subSectionAzimuths[][];; String endPointNames[]; Location endPointLocs[]; int numSections, numSubSections, minNumSubSectInRup; - ArrayList> subSectionConnectionsListList, endToEndSectLinksList; + List> subSectionConnectionsListList, endToEndSectLinksList; double maxJumpDist, maxAzimuthChange, maxTotAzimuthChange, maxSubSectionLength; - ArrayList> subSectionPrefDataListList; - ArrayList subSectionPrefDataList; // same as above, but a sequential list (not list of lists) + List> subSectionPrefDataListList; + List subSectionPrefDataList; // same as above, but a sequential list (not list of lists) // this is to store the section and subsection indices for the ith subsection. 
int[] sectForSubSectionMapping,subSectForSubSectMapping, firstSubsectOfSectMapping; @@ -44,7 +45,7 @@ public class CreateRupturesFromSections { String test = ""; // this string is used filled in with something when we're testing with a subset of fault sections // (it's added to the distance-calc output files to avoid confusion from when we're doing the full calc) - ArrayList sectionClusterList; + List sectionClusterList; /** * @@ -145,8 +146,8 @@ public SectionCluster getCluster(int clusterIndex) { - public ArrayList> getRupList() { - ArrayList> rupList = new ArrayList>(); + public List> getRupList() { + List> rupList = new ArrayList<>(); for(int i=0; i>(); + subSectionPrefDataListList = new ArrayList<>(); subSectionPrefDataList = new ArrayList(); numSubSections=0; numSections = allFaultSectionPrefData.size(); @@ -269,7 +270,7 @@ private void createSubSections(boolean includeSectionsWithNaN_slipRates) { for(int i=0; i subSectData = faultSectionPrefData.getSubSectionsList(maxSectLength); + List subSectData = faultSectionPrefData.getSubSectionsList(maxSectLength); numSubSections += subSectData.size(); // bug fixed! 
(this used to be below the following line) if(subSectData.size()>maxNumSubSections) maxNumSubSections = numSubSections; subSectionPrefDataListList.add(subSectData); @@ -394,7 +395,7 @@ private void calcSubSectionAzimuths() { */ private void computeCloseSubSectionsListList() { - subSectionConnectionsListList = new ArrayList>(); + subSectionConnectionsListList = new ArrayList<>(); // first add the adjacent subsections in that section for(int i=0; i sect1_List = subSectionPrefDataListList.get(i); + List sect1_List = subSectionPrefDataListList.get(i); for(int j=i+1; j sect2_List = subSectionPrefDataListList.get(j); + List sect2_List = subSectionPrefDataListList.get(j); double minDist=Double.MAX_VALUE; int subSectIndex1 = -1; int subSectIndex2 = -1; @@ -466,7 +467,7 @@ private void makeClusterList() { private void addLinks(int subSectIndex, SectionCluster list) { - ArrayList branches = subSectionConnectionsListList.get(subSectIndex); + List branches = subSectionConnectionsListList.get(subSectIndex); for(int i=0; i sectList = subSectionConnectionsListList.get(sIndex1); + List sectList = subSectionConnectionsListList.get(sIndex1); outputString += "\n"+subSectionPrefDataList.get(sIndex1).getName() + " connections:\n\n"; for(int i=0;i { - ArrayList subSectionPrefDataList; - ArrayList allSubSectionsIdList = null; - ArrayList> subSectionConnectionsListList; - ArrayList> rupListIndices; // elements here are subsection IDs + List subSectionPrefDataList; + List allSubSectionsIdList = null; + List> subSectionConnectionsListList; + List> rupListIndices; // elements here are subsection IDs int minNumSubSectInRup; int numRupsAdded; double maxAzimuthChange, maxTotAzimuthChange; double[][] subSectionAzimuths; - public SectionCluster(ArrayList subSectionPrefDataList, int minNumSubSectInRup, - ArrayList> subSectionConnectionsListList, double[][] subSectionAzimuths, + public SectionCluster(List subSectionPrefDataList, int minNumSubSectInRup, + List> subSectionConnectionsListList, 
double[][] subSectionAzimuths, double maxAzimuthChange, double maxTotAzimuthChange) { this.minNumSubSectInRup = minNumSubSectInRup; this.subSectionPrefDataList = subSectionPrefDataList; @@ -47,7 +48,7 @@ public int getNumSubSections() { * This returns the IDs of all the subsections in the cluster * @return */ - public ArrayList getAllSubSectionsIdList() { + public List getAllSubSectionsIdList() { if(allSubSectionsIdList==null) computeAllSubSectionsIdList(); return allSubSectionsIdList; } @@ -66,15 +67,15 @@ public int getNumRuptures() { } - public ArrayList> getRuptures() { + public List> getRuptures() { // return new ArrayList>(); if(rupListIndices== null) computeRupList(); // now convert to holding subsection IDs - ArrayList> rupList = new ArrayList>(); + List> rupList = new ArrayList<>(); for(int i=0;i rup = rupListIndices.get(i); - ArrayList newRup = new ArrayList(); + List rup = rupListIndices.get(i); + List newRup = new ArrayList(); for(int j=0;j> getRuptures() { } - public ArrayList> getRupturesByIndices() { + public List> getRupturesByIndices() { if(rupListIndices== null) computeRupList(); return rupListIndices; @@ -103,7 +104,7 @@ private void addRuptures(ArrayList list) { lastSubSect = -1; // bogus index because subSectIndex is first in list // loop over branches at the present subsection - ArrayList branches = subSectionConnectionsListList.get(subSectIndex); + List branches = subSectionConnectionsListList.get(subSectIndex); for(int i=0; i list) { if(newLastAzimuthDiff>maxAzimuthChange) { // System.out.println("Azimuth difference is too large!"); - ArrayList lastRup = rupListIndices.get(rupListIndices.size()-1); + List lastRup = rupListIndices.get(rupListIndices.size()-1); // System.out.println("lastRup.get(lastRup.size()-1) = " + lastRup.get(lastRup.size()-1)); // System.out.println("lastSubSect = " + lastSubSect); continue; @@ -172,7 +173,7 @@ private void addRuptures(ArrayList list) { private void computeRupList() { System.out.println("Cluster: 
"+this); - rupListIndices = new ArrayList>(); + rupListIndices = new ArrayList<>(); // loop over every subsection as the first in the rupture int progress = 0; int progressIncrement = 5; @@ -194,10 +195,10 @@ private void computeRupList() { System.out.print("\n"); // now filter out duplicates (which would exist in reverse order) & change from containing indices to IDs - ArrayList> newRupList = new ArrayList>(); + List> newRupList = new ArrayList<>(); for(int r=0; r< rupListIndices.size();r++) { - ArrayList rup = rupListIndices.get(r); - ArrayList reverseRup = new ArrayList(); + List rup = rupListIndices.get(r); + List reverseRup = new ArrayList(); for(int i=rup.size()-1;i>=0;i--) reverseRup.add(rup.get(i)); if(!newRupList.contains(reverseRup)) { // keep if we don't already have newRupList.add(rup); @@ -210,11 +211,11 @@ private void computeRupList() { // Remove ruptures where subsection strikes have too big a spread double MAXstrikeDiff = 90; //maximum allowed difference in strikes between any two subsections in the same rupture, in degrees - ArrayList> toRemove = new ArrayList>(); + List> toRemove = new ArrayList<>(); for(int r=0; r< numRupsAdded;r++) { - ArrayList rup = rupListIndices.get(r); + List rup = rupListIndices.get(r); //System.out.println("rup = " + rup); - ArrayList strikes = new ArrayList(rup.size()); + List strikes = new ArrayList(rup.size()); for (int i=0; i Date: Thu, 29 Jan 2026 17:17:17 -0800 Subject: [PATCH 02/13] rupture building tests --- .../nshm26/DownDipInterfaceSubSectTests.java | 363 +++++++++++++++--- .../nshm26/DownDipRupSetBuildingTests.java | 240 ++++++++++++ 2 files changed, 558 insertions(+), 45 deletions(-) create mode 100644 src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java diff --git a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java index 8b8e20d9..7249ff1d 100644 --- 
a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java @@ -1,16 +1,21 @@ package scratch.kevin.nshm26; import java.awt.Color; +import java.io.BufferedReader; import java.io.File; import java.io.IOException; +import java.io.InputStreamReader; +import java.io.Reader; import java.nio.charset.Charset; import java.text.DecimalFormat; import java.util.ArrayList; +import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.StringTokenizer; import org.apache.commons.math3.util.Precision; +import org.opensha.commons.data.CSVFile; import org.opensha.commons.geo.Location; import org.opensha.commons.geo.LocationList; import org.opensha.commons.geo.LocationUtils; @@ -23,7 +28,9 @@ import org.opensha.commons.util.DataUtils; import org.opensha.commons.util.DataUtils.MinMaxAveTracker; import org.opensha.commons.util.FaultUtils; +import org.opensha.commons.util.FaultUtils.AngleAverager; import org.opensha.commons.util.cpt.CPT; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.GeoJSONFaultReader; import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_SubductionScalingRelationships; import org.opensha.sha.faultSurface.DownDipSubsectionBuilder; import org.opensha.sha.faultSurface.FaultTrace; @@ -31,35 +38,73 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Range; -import com.google.common.io.Files; import net.mahdilamb.colormap.Colors; public class DownDipInterfaceSubSectTests { public static void main(String[] args) throws IOException { - File inFile = new File("/tmp/ker_slab2_dep_10km_contours.xyz"); + Class resourceClass = GeoJSONFaultSection.class; + + String faultName = "Kermadec"; + int faultID = 0; + String resourcePrefix = "/data/erf/nshm26/amsam/fault_models/subduction/"; + BufferedReader countoursIn = new BufferedReader(new InputStreamReader( + 
resourceClass.getResourceAsStream(resourcePrefix+"ker_slab2_dep_10km_contours.xyz"))); + CSVFile traceCSV = CSVFile.readStream( + resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_ker.csv"), true); + boolean smoothTraceForDDW = true; String prefix = "ker_slab2"; Range depthRange = Range.closed(0d, 60d); Range lonFilter = null; Range latFilter = Range.atLeast(-30d); -// File inFile = new File("/tmp/izu_slab2_dep_10km_contours.xyz"); +// String faultName = "Izu-Bonin"; +// int faultID = 0; +// String resourcePrefix = "/data/erf/nshm26/gnmi/fault_models/subduction/"; +// BufferedReader countoursIn = new BufferedReader(new InputStreamReader( +// GeoJSONFaultSection.class.getResourceAsStream(resourcePrefix+"izu_slab2_dep_10km_contours.xyz"))); +// CSVFile traceCSV = CSVFile.readStream( +// resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_izu.csv"), true); +// boolean smoothTraceForDDW = true; // String prefix = "izu_slab2"; // Range depthRange = Range.closed(0d, 60d); // Range lonFilter = null; // Range latFilter = Range.atMost(27d); - File outputDir = inFile.getParentFile(); + File outputDir = new File("/tmp/"+prefix); + Preconditions.checkArgument(outputDir.exists() || outputDir.mkdir()); + + double traceSmoothDist = 200d; double scaleLength = 20d; boolean scaleIsMax = false; boolean constantCount = false; - List rawContours = loadDepthContours(inFile); + List rawContours = loadDepthContours(countoursIn); + + // load the upper trace + FaultTrace upper = new FaultTrace(); + AngleAverager upperAzAvg = new AngleAverager(); + for (int row=1; row lines = new ArrayList<>(); List chars = new ArrayList<>(); @@ -84,26 +133,32 @@ public static void main(String[] args) throws IOException { lines.add(raw); chars.add(rawChar); } + + PlotCurveCharacterstics traceChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 1f, Colors.tab_orange); + lines.add(upper); + chars.add(traceChar); mapMaker.plotLines(lines, chars); 
mapMaker.plot(outputDir, prefix+"_raw", " "); List> stitchedIndividual = new ArrayList<>(); - List depthContours = processContours(rawContours, stitchedIndividual); + List depthContours = processContours(rawContours, upper, stitchedIndividual); // for (int d=0; d depthRange = Range.closedOpen(0d, 30d); - GeoJSONFaultSection refSect = new GeoJSONFaultSection.Builder(0, "Test Fault", depthContours.get(0)) + GeoJSONFaultSection refSect = new GeoJSONFaultSection.Builder(faultID, faultName, upper) .dip(90d).upperDepth(depthRange.lowerEndpoint()).lowerDepth(depthRange.upperEndpoint()) .rake(90).build(); lines.clear(); chars.clear(); + PlotCurveCharacterstics processedChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 1f, Color.BLACK); for (FaultTrace processed : depthContours) { lines.add(processed); chars.add(processedChar); } + PlotCurveCharacterstics connectorChar = new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, Colors.tab_red); for (List stitched : stitchedIndividual) { for (int i=1; i interpTraces = DownDipSubsectionBuilder.interpolateDepthTraces( - depthContours, 2, scaleLength, scaleIsMax, depthRange); + List interpTraces; + if (smoothTraceForDDW) { + int numResample = 1000; + FaultTrace upperResampled = FaultUtils.resampleTrace(upper, numResample); + double avgDepth = upperResampled.stream().mapToDouble(l -> l.depth).average().getAsDouble(); + double minDepth = upper.stream().mapToDouble(l -> l.depth).min().getAsDouble(); + double maxDepth = upper.stream().mapToDouble(l -> l.depth).max().getAsDouble(); + System.out.println("Averaging out depth of upper trace for DDW calculation; avg=" + +(float)avgDepth+" km, range=["+(float)minDepth+", "+(float)maxDepth+"]"); + + boolean replaceInterpolatedWithUpper = depthRange == null || depthRange.contains(avgDepth); + + FaultTrace localLimit = FaultUtils.resampleTrace(depthContours.get(1), numResample); + + FaultTrace upperForDDW; + if (Double.isFinite(traceSmoothDist) && traceSmoothDist > 0d) { + double[] smoothed 
= new double[upperResampled.size()]; + double length = upper.getTraceLength(); + + System.out.println("\tUsing smoothing distance="+(float)traceSmoothDist+" (in either direction) for length="+(float)length); + + double lengthEach = length/(upperResampled.size()-1d); + int numAway = (int)Math.round(traceSmoothDist/lengthEach); + + for (int i=0; i= localLimit.get(i).depth) + // we smoothed past the next layer down, use that depth as a limit + smoothed[i] = localLimit.get(i).depth; + } + for (int i=0; i l.depth).average().getAsDouble(); + minDepth = upperResampled.stream().mapToDouble(l -> l.depth).min().getAsDouble(); + maxDepth = upperResampled.stream().mapToDouble(l -> l.depth).max().getAsDouble(); + System.out.println("Smoothed upper trace for DDW calculation: avg=" + +(float)avgDepth+" km, range=["+(float)minDepth+", "+(float)maxDepth+"]"); + } else { + upperForDDW = new FaultTrace(null, upper.size()); + for (int i=0; i depthContoursForDDW = new ArrayList<>(depthContours); + depthContoursForDDW.set(0, upperForDDW); + interpTraces = DownDipSubsectionBuilder.interpolateDepthTraces( + depthContoursForDDW, 2, scaleLength, scaleIsMax, depthRange); + if (replaceInterpolatedWithUpper) { + System.out.println("Replacing interpolated upper with original"); + interpTraces.set(0, upper); + } + } else { + interpTraces = DownDipSubsectionBuilder.interpolateDepthTraces( + depthContours, 2, scaleLength, scaleIsMax, depthRange); + } + System.out.println("Built "+interpTraces.size()+" interpolated traces"); for (FaultTrace trace : interpTraces) System.out.println("\t"+traceStr(trace)); @@ -220,20 +344,25 @@ public static void main(String[] args) throws IOException { System.out.println("Sub-Sect Dip:\t"+dipRange); System.out.println("Sub-Sect Row counts:\t"+perRowRange); + mapMaker.setWriteGeoJSON(false); mapMaker.plot(outputDir, prefix+"_sub_sects", " "); + + GeoJSONFaultReader.writeFaultSections(new File(outputDir, prefix+"_sub_sects.geojson"), allSects); } - private static List 
processContours(List contours) { - return processContours(contours, null); + private static List processContours(List contours, FaultTrace upper) { + return processContours(contours, upper, null); } - private static List processContours(List contours, List> stitchedIndividual) { + private static List processContours(List contours, FaultTrace upper, List> stitchedIndividual) { // group by depth List> depthGrouped = new ArrayList<>(); double curDepth = Double.NaN; List curTraces = null; for (FaultTrace trace : contours) { double depth = trace.first().depth; + for (Location loc : trace) + Preconditions.checkState(Precision.equals(depth, loc.depth, 0.1), "Depths vary within contours: %s != %s", (float)depth, (float)loc.depth); if (curTraces == null || !Precision.equals(depth, curDepth, 0.1)) { curTraces = new ArrayList<>(); curDepth = depth; @@ -291,8 +420,16 @@ private static List processContours(List contours, List< System.out.println("Detected middle strike="+oDF.format(strike)+" with dip direction of " +oDF.format(dipDir)+" (delta="+FaultUtils.getAbsAngleDiff(strike, dipDir)+")"); + double upperMaxDepth = 0d; + for (Location loc : upper) + upperMaxDepth = Math.max(upperMaxDepth, loc.depth); + + FaultTrace resampledUpper = null; + // now stitch - List stitched = new ArrayList<>(); + List stitched = new ArrayList<>(depthGrouped.size()+1); + // add the upper trace first + stitched.add(upper); for (List depthTraces : depthGrouped) { double depth = depthTraces.get(0).first().depth; System.out.println(oDF.format(depth)+" km: "+depthTraces.size()+" contours"); @@ -376,6 +513,104 @@ private static List processContours(List contours, List< stitchedList.set(i, getReversed(stitchedList.get(i))); } + if (depth < upperMaxDepth || Precision.equals(depth, upperMaxDepth, 2d)) { + // see if we can stitch anything in from the upper trace + System.out.println("Will try to stitch in from the upper trace that dips below this one (down to " + +(float)upperMaxDepth+" km); upper strike: 
"+upper.getStrikeDirection()); + if (resampledUpper == null) { + resampledUpper = FaultUtils.resampleTrace(upper, (int)Math.max(1000, upper.getTraceLength()/0.1)); + Preconditions.checkState(!shouldReverse(strike, resampledUpper.getStrikeDirection())); + } + List modStitchedList = new ArrayList<>(); + modStitchedList.add(stitchedList.get(0)); + for (int s=1; s 1 && after.size() > 1) { + // draw lines extending the trace before and after this gap toward the gap + Location before1 = before.get(before.size()-2); + Location before2 = before.last(); + Location after1 = after.get(1); + Location after2 = after.first(); + + // and draw lines perpendicular to those lines that will be used to find trace points between them + // we'll make sure the trace points are to the right (positive in LocationUtils methods) of + // these lines + Location beforePerp1 = before2; + // azimuth is the direction - 90 degrees + Location beforePerp2 = LocationUtils.location(beforePerp1, + LocationUtils.azimuthRad(before1, before2) - Math.PI*0.5, 10d); + + Location afterPerp1 = after2; + // azimuth is the direction - 90 degrees + Location afterPerp2 = LocationUtils.location(afterPerp1, + LocationUtils.azimuthRad(after1, after2) - Math.PI*0.5, 10d); + + // within this distance from either end, we'll try to stop at the projection from the adjacent + // segment to get a smooth interpolation +// double gapDistance = LocationUtils.horzDistanceFast(before2, after1); +// double lineCheckDist = Math.max(10d, gapDistance * 0.2); + // on second thought, it seems to do better always forcing this check + double lineCheckDist = Double.POSITIVE_INFINITY; + + FaultTrace addition = null; + + for (int i=1; i= 0d + && LocationUtils.distanceToLineFast(afterPerp1, afterPerp2, loc) >= 0d; + if (inside) { + // we're within the envelope of this missing piece + + boolean keep = false; + if (addition == null) { + // first one that's inside, see if we have crossed the projected line + double beforeDist = 
LocationUtils.horzDistanceFast(before2, loc); + if (beforeDist < lineCheckDist) { + // check against the projected line and wait until we've crossed it to the right + if (LocationUtils.distanceToLineFast(before1, before2, loc) >= 0) { + // we're on the right of the projected line, start tracking + addition = new FaultTrace(ADDITION_FROM_UPPER_NAME); + keep = true; + } + } else { + // far enough away, just start tracking + addition = new FaultTrace(); + keep = true; + } + } else { + if (LocationUtils.horzDistanceFast(loc, after1) < lineCheckDist) { + // we're near the end, start checking against that line + if (LocationUtils.distanceToLineFast(after1, after2, loc) >= 0) { + // we're back to the left of the contour (to the right of the back-line) + break; + } + } + keep = true; + } + if (keep) { +// addition.add(loc); + addition.add(new Location(loc.lat, loc.lon, depth)); + } + } else if (addition != null) { + // already found it, break + break; + } + } + if (addition != null) { + modStitchedList.add(addition); + System.out.println("Stitched in an addition from the upper trace:\t"+traceStr(addition)); + } + } + + modStitchedList.add(after); + } + Preconditions.checkState(modStitchedList.size() >= stitchedList.size()); + stitchedList = modStitchedList; + } + if (stitchedIndividual != null) stitchedIndividual.add(stitchedList); @@ -396,46 +631,84 @@ private static List processContours(List contours, List< return stitched; } + private static final String ADDITION_FROM_UPPER_NAME = "Stitched From Upper Trace"; + private static List filterContours(List contours, Range latRange, Range lonRange) { List ret = new ArrayList<>(); for (FaultTrace trace : contours) { - int firstInside = -1; - int lastInside = -1; - boolean allInside = true; - boolean noneInside = true; - for (int i=0; i loadDepthContours(File inFile) throws IOException { + private static FaultTrace filterContour(FaultTrace contour, Range latRange, Range lonRange) { + + int firstInside = -1; + int lastInside = 
-1; + boolean allInside = true; + boolean noneInside = true; + for (int i=0; i 1) { + // try to interpolate + Location first = findFurthestInside(contour.get(firstInside), contour.get(firstInside-1), latRange, lonRange); + if (first != null) + cutTrace.add(first); + } + for (int i=firstInside; i<=lastInside; i++) + cutTrace.add(contour.get(i)); + if (lastInside < contour.size()-1) { + // try to interpolate + Location last = findFurthestInside(contour.get(lastInside), contour.get(lastInside+1), latRange, lonRange); + if (last != null) + cutTrace.add(last); + } + + return cutTrace; + } + + private static Location findFurthestInside(Location from, Location to, Range latRange, Range lonRange) { + FaultTrace resampled = new FaultTrace(null, 2); + resampled.add(from); + resampled.add(to); + resampled = FaultUtils.resampleTrace(resampled, 100); + Location lastInside = null; + for (int i=1; i loadDepthContours(BufferedReader in) throws IOException { List ret = new ArrayList<>(); FaultTrace curTrace = null; - for (String line : Files.readLines(inFile, Charset.defaultCharset())) { + String line; + while ((line = in.readLine()) != null) { line = line.trim(); if (line.startsWith(">")) { // new contour diff --git a/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java new file mode 100644 index 00000000..54af03d5 --- /dev/null +++ b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java @@ -0,0 +1,240 @@ +package scratch.kevin.nshm26; + +import java.awt.Color; +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.IntSummaryStatistics; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.opensha.commons.gui.plot.AnimatedGIFRenderer; +import 
org.opensha.commons.gui.plot.GeographicMapMaker; +import org.opensha.commons.gui.plot.HeadlessGraphPanel; +import org.opensha.commons.gui.plot.PlotSpec; +import org.opensha.commons.gui.plot.PlotUtils; +import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files; +import org.opensha.commons.util.cpt.CPT; +import org.opensha.sha.earthquake.faultSysSolution.RupSetScalingRelationship; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.downDip.RectangularDownDipGrowingStrategy; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.downDip.RectangularDownDipGrowingStrategy.NeighborOverlaps; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.GeoJSONFaultReader; +import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_SubductionScalingRelationships; +import org.opensha.sha.faultSurface.FaultSection; +import org.opensha.sha.faultSurface.GeoJSONFaultSection; + +import com.google.common.base.Preconditions; + +import net.mahdilamb.colormap.Colors; + +public class DownDipRupSetBuildingTests { + + public static void main(String[] args) throws IOException { + String prefix = "ker_slab2"; + + File inDir = new File("/tmp/"+prefix); + File subSectsFile = new File(inDir, prefix+"_sub_sects.geojson"); + List sects = GeoJSONFaultReader.readFaultSections(subSectsFile); + File outDir = new File(inDir, "rup_set_debug"); + Preconditions.checkState(outDir.exists() || outDir.mkdir()); + + boolean doAnimation = true; + boolean writeIndvFrames = true; + + FaultSubsectionCluster fullCluster = new FaultSubsectionCluster(sects); + NeighborOverlaps neighborOverlaps = new NeighborOverlaps(fullCluster, false); + NeighborOverlaps neighborOverlapsDD = new NeighborOverlaps(fullCluster, true); + + List debugSects = new ArrayList<>(); + + int numRows = sects.stream().mapToInt(s-> s.getSubSectionIndexDownDip()).max().getAsInt()+1; + List> rowColOrganized = new 
ArrayList<>(numRows); + for (int i=0; i()); + for (GeoJSONFaultSection sect : sects) { + int row = sect.getSubSectionIndexDownDip(); + int col = sect.getSubSectionIndexAlong(); + List rowSects = rowColOrganized.get(row); + while (rowSects.size() <= col) + rowSects.add(null); + rowSects.set(col, sect); + } + for (int row=0; row topRow = rowColOrganized.get(0); + List bottomRow = rowColOrganized.get(numRows-1); + debugSects.add(topRow.get(0)); + debugSects.add(topRow.get(topRow.size()-1)); + debugSects.add(bottomRow.get(0)); + debugSects.add(bottomRow.get(bottomRow.size()-1)); + + List middleRow = rowColOrganized.get(numRows/2); + debugSects.add(middleRow.get(0)); + for (int i=1; i<5; i++) + debugSects.add(middleRow.get((int)(middleRow.size() * i/5d))); + debugSects.add(middleRow.get(middleRow.size()-1)); + + GeographicMapMaker mapMaker = new GeographicMapMaker(sects); + + mapMaker.setFillSurfaces(true); + mapMaker.setWriteGeoJSON(false); + + Color startSectColor = Colors.tab_blue; + CPT overlapCPT = GMT_CPT_Files.SEQUENTIAL_LAJOLLA_UNIFORM.instance().reverse().rescale(0d, 1d); + overlapCPT.setNanColor(startSectColor); + + Color participatingColor = overlapCPT.getMaxColor(); + Color otherColor = overlapCPT.getMinColor(); + + RectangularDownDipGrowingStrategy growingStrat = new RectangularDownDipGrowingStrategy(); + + RupSetScalingRelationship scale = PRVI25_SubductionScalingRelationships.LOGA_C4p0; + HeadlessGraphPanel gp = PlotUtils.initHeadless(); + int gifWidth = 800; + double gifFPS = 5; + + File animDir = new File(outDir, "animations"); + Preconditions.checkArgument(!doAnimation || animDir.exists() || animDir.mkdir()); + + for (FaultSection debugSect : debugSects) { + String sectPrefix = prefix+"_sect"+debugSect.getSectionId(); + System.out.println("Processing "+debugSect); + mapMaker.plotSectScalars(s-> s == debugSect ? 
Double.NaN : neighborOverlaps.getOverlap(debugSect, s), + overlapCPT, "Fractional Overlap"); + + mapMaker.plot(outDir, sectPrefix+"_overlap", " "); + mapMaker.plotSectScalars(s-> s == debugSect ? Double.NaN : neighborOverlapsDD.getOverlap(debugSect, s), + overlapCPT, "Fractional Overlap"); + + mapMaker.plot(outDir, sectPrefix+"_overlap_dd", " "); + + System.out.println("\tBuilding variations"); + growingStrat.setDebug(debugSect == debugSects.get(0)); + List variations = growingStrat.getVariations(fullCluster, debugSect); + System.out.println("\tBuilt "+variations.size()+" variations"); + Preconditions.checkState(!variations.isEmpty()); + mapMaker.clearSectScalars(); + + if (doAnimation) { + System.out.println("\tMaking animation"); + double minMag = Double.POSITIVE_INFINITY; + double maxMag = 0d; + + AnimatedGIFRenderer gif = new AnimatedGIFRenderer(new File(animDir, sectPrefix+"_rups.gif"), gifFPS, true); + AnimatedGIFRenderer gifSubSeis = new AnimatedGIFRenderer(new File(animDir, sectPrefix+"_rups_sub_seis.gif"), 0.5, true); + + File fullFrameDir = new File(outDir, sectPrefix+"_rups"); + Preconditions.checkState(!writeIndvFrames || fullFrameDir.exists() || fullFrameDir.mkdir()); + + CompletableFuture gifWriteFuture = null; + CompletableFuture subSeisFinalizeFuture = null; + boolean hasFullWidth = false; + for (int i=0; i sectColors = new ArrayList<>(sects.size()); + for (FaultSection sect : sects) { + if (sect == debugSect) + sectColors.add(startSectColor); + else if (cluster.contains(sect)) + sectColors.add(participatingColor); + else + sectColors.add(otherColor); + } + mapMaker.plotSectColors(sectColors); + + // get it organized + double area = 0d; + for (FaultSection sect : cluster.subSects) + area += sect.getArea(true); + + double mag = scale.getMag(area, Double.NaN, Double.NaN, Double.NaN, cluster.startSect.getAveRake()); + minMag = Math.min(minMag, mag); + maxMag = Math.max(maxMag, mag); + + String title = "Rupture "+i+"/"+variations.size()+", 
M"+twoDF.format(mag); + + PlotSpec plot = mapMaker.buildPlot(title); + + gp.drawGraphPanel(plot, false, false, mapMaker.getXRange(), mapMaker.getYRange()); + + int height = PlotUtils.calcHeight(gp, gifWidth, true); + + gp.getChartPanel().setSize(gifWidth, height); + BufferedImage img = gp.getBufferedImage(gifWidth, height); + + if (writeIndvFrames) { + File frameFile = new File(fullFrameDir, "rupture_"+frameDF.format(i)+".png"); + gp.saveAsPNG(frameFile.getAbsolutePath(),gifWidth, height); + } + + if (gifWriteFuture != null) + gifWriteFuture.join(); + if (hasFullWidth) { + gifWriteFuture = CompletableFuture.runAsync(new WriteFrameRunnable(img, gif)); + } else { + gifWriteFuture = CompletableFuture.allOf(CompletableFuture.runAsync(new WriteFrameRunnable(img, gif)), + CompletableFuture.runAsync(new WriteFrameRunnable(img, gifSubSeis))); + } + + if (!hasFullWidth) { + // see if this was full-width + IntSummaryStatistics rowStats = cluster.subSects.stream().mapToInt(s->s.getSubSectionIndexDownDip()).summaryStatistics(); + if (rowStats.getMin() == 0 && rowStats.getMax() == rowColOrganized.size()-1) { + // have a full width rupture + hasFullWidth = true; + subSeisFinalizeFuture = gifWriteFuture.thenRun(new Runnable() { + + @Override + public void run() { + try { + gifSubSeis.finalizeAnimation(); + } catch (IOException e) { + throw ExceptionUtils.asRuntimeException(e); + } + } + }); + } + } + } + gifWriteFuture.join(); + gif.finalizeAnimation(); + if (subSeisFinalizeFuture == null) + gifSubSeis.finalizeAnimation(); + else + subSeisFinalizeFuture.join(); + System.out.println("\tDone with animation"); + } + } + } + + private static class WriteFrameRunnable implements Runnable { + + private BufferedImage img; + private AnimatedGIFRenderer gif; + + public WriteFrameRunnable(BufferedImage img, AnimatedGIFRenderer gif) { + this.img = img; + this.gif = gif; + } + + @Override + public void run() { + try { + gif.writeFrame(img); + } catch (IOException e) { + throw 
ExceptionUtils.asRuntimeException(e); + } + } + + } + + private static final DecimalFormat frameDF = new DecimalFormat("0000"); + private static final DecimalFormat twoDF = new DecimalFormat("0.00"); + +} From c0d9518161457b7f6e30af4cdebbdfe7f03ae7fc Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Fri, 30 Jan 2026 12:25:12 -0800 Subject: [PATCH 03/13] now builds rupture sets --- .../nshm26/DownDipRupSetBuildingTests.java | 42 +++++++++++++++---- 1 file changed, 35 insertions(+), 7 deletions(-) diff --git a/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java index 54af03d5..2604a05c 100644 --- a/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java @@ -18,11 +18,17 @@ import org.opensha.commons.gui.plot.PlotUtils; import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files; import org.opensha.commons.util.cpt.CPT; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; import org.opensha.sha.earthquake.faultSysSolution.RupSetScalingRelationship; +import org.opensha.sha.earthquake.faultSysSolution.RuptureSets.RectangularDownDipSubductionRupSetConfig; +import org.opensha.sha.earthquake.faultSysSolution.RuptureSets.RupSetConfig; +import org.opensha.sha.earthquake.faultSysSolution.reports.ReportPageGen; +import org.opensha.sha.earthquake.faultSysSolution.reports.ReportPageGen.PlotLevel; import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster; import org.opensha.sha.earthquake.faultSysSolution.ruptures.downDip.RectangularDownDipGrowingStrategy; import org.opensha.sha.earthquake.faultSysSolution.ruptures.downDip.RectangularDownDipGrowingStrategy.NeighborOverlaps; import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.GeoJSONFaultReader; +import org.opensha.sha.earthquake.faultSysSolution.util.FaultSysTools; import 
org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_SubductionScalingRelationships; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.GeoJSONFaultSection; @@ -35,6 +41,7 @@ public class DownDipRupSetBuildingTests { public static void main(String[] args) throws IOException { String prefix = "ker_slab2"; +// String prefix = "izu_slab2"; File inDir = new File("/tmp/"+prefix); File subSectsFile = new File(inDir, prefix+"_sub_sects.geojson"); @@ -43,7 +50,9 @@ public static void main(String[] args) throws IOException { Preconditions.checkState(outDir.exists() || outDir.mkdir()); boolean doAnimation = true; + boolean doSubSeisAnimation = false; boolean writeIndvFrames = true; + boolean buildRupSet = true; FaultSubsectionCluster fullCluster = new FaultSubsectionCluster(sects); NeighborOverlaps neighborOverlaps = new NeighborOverlaps(fullCluster, false); @@ -127,7 +136,8 @@ public static void main(String[] args) throws IOException { double maxMag = 0d; AnimatedGIFRenderer gif = new AnimatedGIFRenderer(new File(animDir, sectPrefix+"_rups.gif"), gifFPS, true); - AnimatedGIFRenderer gifSubSeis = new AnimatedGIFRenderer(new File(animDir, sectPrefix+"_rups_sub_seis.gif"), 0.5, true); + AnimatedGIFRenderer gifSubSeis = doSubSeisAnimation ? 
+ new AnimatedGIFRenderer(new File(animDir, sectPrefix+"_rups_sub_seis.gif"), 0.5, true) : null; File fullFrameDir = new File(outDir, sectPrefix+"_rups"); Preconditions.checkState(!writeIndvFrames || fullFrameDir.exists() || fullFrameDir.mkdir()); @@ -163,6 +173,10 @@ else if (cluster.contains(sect)) gp.drawGraphPanel(plot, false, false, mapMaker.getXRange(), mapMaker.getYRange()); + double tick = mapMaker.getAxisTick(); + PlotUtils.setXTick(gp, tick); + PlotUtils.setYTick(gp, tick); + int height = PlotUtils.calcHeight(gp, gifWidth, true); gp.getChartPanel().setSize(gifWidth, height); @@ -175,14 +189,14 @@ else if (cluster.contains(sect)) if (gifWriteFuture != null) gifWriteFuture.join(); - if (hasFullWidth) { + if (hasFullWidth || !doSubSeisAnimation) { gifWriteFuture = CompletableFuture.runAsync(new WriteFrameRunnable(img, gif)); } else { gifWriteFuture = CompletableFuture.allOf(CompletableFuture.runAsync(new WriteFrameRunnable(img, gif)), CompletableFuture.runAsync(new WriteFrameRunnable(img, gifSubSeis))); } - if (!hasFullWidth) { + if (!hasFullWidth && doSubSeisAnimation) { // see if this was full-width IntSummaryStatistics rowStats = cluster.subSects.stream().mapToInt(s->s.getSubSectionIndexDownDip()).summaryStatistics(); if (rowStats.getMin() == 0 && rowStats.getMax() == rowColOrganized.size()-1) { @@ -204,13 +218,27 @@ public void run() { } gifWriteFuture.join(); gif.finalizeAnimation(); - if (subSeisFinalizeFuture == null) - gifSubSeis.finalizeAnimation(); - else - subSeisFinalizeFuture.join(); + if (doSubSeisAnimation) { + if (subSeisFinalizeFuture == null) + gifSubSeis.finalizeAnimation(); + else + subSeisFinalizeFuture.join(); + } System.out.println("\tDone with animation"); } } + + if (buildRupSet) { + RupSetConfig rsConfig = new RectangularDownDipSubductionRupSetConfig(sects, scale); + FaultSystemRupSet rupSet = rsConfig.build(FaultSysTools.defaultNumThreads()); + rupSet.write(new File(outDir, "rupture_set.zip")); + File reportDir = new File(outDir, 
"rupture_set_report"); + + ReportPageGen report = new ReportPageGen(rupSet, null, sects.get(0).getParentSectionName()+" Test Rupture Set", + reportDir, ReportPageGen.getDefaultRupSetPlots(PlotLevel.DEFAULT)); + report.setReplot(true); + report.generatePage(); + } } private static class WriteFrameRunnable implements Runnable { From f137750c858d06fd6b72748710c69111c2abd8b7 Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Fri, 30 Jan 2026 13:58:39 -0800 Subject: [PATCH 04/13] initial slip rate projection tests --- .../nshm26/DownDipInterfaceSubSectTests.java | 25 ++++++++++ .../kevin/nshm26/SlipProjectionTests.java | 46 +++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java diff --git a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java index 7249ff1d..7dd131bf 100644 --- a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java @@ -33,6 +33,7 @@ import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.GeoJSONFaultReader; import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_SubductionScalingRelationships; import org.opensha.sha.faultSurface.DownDipSubsectionBuilder; +import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.FaultTrace; import org.opensha.sha.faultSurface.GeoJSONFaultSection; @@ -344,6 +345,28 @@ public static void main(String[] args) throws IOException { System.out.println("Sub-Sect Dip:\t"+dipRange); System.out.println("Sub-Sect Row counts:\t"+perRowRange); + System.out.println(); + System.out.println("Row average stats:"); + for (int row=0; row loadDepthContours(BufferedReader in) throws IOEx private static String traceStr(FaultTrace trace) { boolean resampled = false; + double avgDepth = 
trace.stream().mapToDouble(l->l.depth).average().getAsDouble(); if (trace.size() > 5) { trace = FaultUtils.resampleTrace(trace, 4); resampled = true; @@ -746,6 +770,7 @@ private static String traceStr(FaultTrace trace) { .append(", ").append(oDF.format(loc.depth)).append("]"); } str.append("; strike=").append(oDF.format(LocationUtils.azimuth(trace.first(), trace.last()))); + str.append(", avg depth=").append(oDF.format(avgDepth)).append(" km"); if (resampled) str.append(" (resampled)"); return str.toString(); diff --git a/src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java b/src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java new file mode 100644 index 00000000..2335189e --- /dev/null +++ b/src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java @@ -0,0 +1,46 @@ +package scratch.kevin.nshm26; + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.opensha.commons.gui.plot.GeographicMapMaker; +import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files; +import org.opensha.commons.util.cpt.CPT; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.GeoJSONFaultReader; +import org.opensha.sha.faultSurface.FaultSection; +import org.opensha.sha.faultSurface.GeoJSONFaultSection; + +import com.google.common.base.Preconditions; + +public class SlipProjectionTests { + + public static void main(String[] args) throws IOException { + String prefix = "ker_slab2"; +// String prefix = "izu_slab2"; + + File inDir = new File("/tmp/"+prefix); + File subSectsFile = new File(inDir, prefix+"_sub_sects.geojson"); + List sects = GeoJSONFaultReader.readFaultSections(subSectsFile); + File outDir = new File(inDir, "slip_projection"); + Preconditions.checkState(outDir.exists() || outDir.mkdir()); + + CPT dipCPT = GMT_CPT_Files.SEQUENTIAL_LAJOLLA_UNIFORM.instance().reverse().rescale(0d, 60d); + double maxRatio = 1.5; + CPT ratioCPT = GMT_CPT_Files.DIVERGING_VIK_UNIFORM.instance().rescale(0d, 2d).trim(1d, 2d).rescale(1d, 
maxRatio); + + GeographicMapMaker mapMaker = new GeographicMapMaker(sects); + mapMaker.setWriteGeoJSON(false); + mapMaker.setFillSurfaces(true); + + mapMaker.plotSectScalars(s->s.getAveDip(), dipCPT, "Dip (degrees)"); + mapMaker.plot(outDir, "dip", " "); + + // cos(dip) = horizontal / on-plane + // on-plane = horizontal / cos(dip) + mapMaker.plotSectScalars(s->1d/Math.cos(Math.toRadians(s.getAveDip())), + ratioCPT, "Projected / Horizontal Slip Rate Ratio (dead on)"); + mapMaker.plot(outDir, "slip_ratio_dead_on", " "); + } + +} From aa81c10c70fb04cbf69c22693f6cdd1f6744bc32 Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Mon, 2 Feb 2026 09:24:44 -0800 Subject: [PATCH 05/13] example 3D geojson file --- .../kevin/nshm26/GeoJSON3DExample.java | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 src/main/java/scratch/kevin/nshm26/GeoJSON3DExample.java diff --git a/src/main/java/scratch/kevin/nshm26/GeoJSON3DExample.java b/src/main/java/scratch/kevin/nshm26/GeoJSON3DExample.java new file mode 100644 index 00000000..478180ca --- /dev/null +++ b/src/main/java/scratch/kevin/nshm26/GeoJSON3DExample.java @@ -0,0 +1,62 @@ +package scratch.kevin.nshm26; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; + +import org.opensha.commons.geo.Location; +import org.opensha.commons.geo.LocationList; +import org.opensha.commons.geo.Region; +import org.opensha.commons.gui.plot.GeographicMapMaker; +import org.opensha.commons.gui.plot.PlotCurveCharacterstics; +import org.opensha.commons.gui.plot.PlotSymbol; +import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files; +import org.opensha.commons.util.cpt.CPT; +import org.opensha.sha.earthquake.faultSysSolution.util.FaultSectionUtils; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_DeformationModels; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_FaultModels; +import 
org.opensha.sha.faultSurface.FaultSection; +import org.opensha.sha.util.NEHRP_TestCity; + +public class GeoJSON3DExample { + + public static void main(String[] args) throws IOException { + List allSects = NSHM23_DeformationModels.GEOLOGIC.build(NSHM23_FaultModels.WUS_FM_v3); + Region region = new Region(new Location(33, -118), new Location(35, -120)); + List plotSects = new ArrayList<>(); + for (FaultSection sect : allSects) + if (FaultSectionUtils.sectInRegion(region, sect, false)) + plotSects.add(sect); + System.out.println("Keeping "+plotSects.size()+" sections"); + + GeographicMapMaker mapMaker = new GeographicMapMaker(region, plotSects); + + mapMaker.setFillSurfaces(true); + mapMaker.setFillVerticalSurfaces(true); + + CPT slipCPT = GMT_CPT_Files.SEQUENTIAL_BATLOW_UNIFORM.instance().rescale(0d, 10d); + mapMaker.plotSectScalars(s->s.getOrigAveSlipRate(), slipCPT, "Slip Rate (mm/yr)"); + + CPT colorCPT = GMT_CPT_Files.CATEGORICAL_TAB10.instance().rescale(0d, 1d); + + Random r = new Random(123456l); + + // add some scatter data + LocationList scatters = new LocationList(); + List scatterChars = new ArrayList<>(); + for (NEHRP_TestCity city : NEHRP_TestCity.values()) { + Location loc = city.location(); + if (region.contains(loc)) { + scatters.add(loc); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_CIRCLE, (float)(12d*r.nextDouble()), colorCPT.getColor(r.nextDouble()))); + } + } + mapMaker.plotScatters(scatters, scatterChars); + + mapMaker.setWriteGeoJSON(true); + mapMaker.plot(new File("/tmp"), "geo3d_test", " "); + } + +} From 8f534e58723e3c91ae30ac6704c0e14a1fe9b59b Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Mon, 2 Feb 2026 09:25:03 -0800 Subject: [PATCH 06/13] now outputs map goejson as well --- .../nshm26/DownDipInterfaceSubSectTests.java | 46 ++++++++++--------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java 
b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java index 7dd131bf..ed181193 100644 --- a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java @@ -39,6 +39,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Range; +import com.google.common.io.Files; import net.mahdilamb.colormap.Colors; @@ -47,31 +48,31 @@ public class DownDipInterfaceSubSectTests { public static void main(String[] args) throws IOException { Class resourceClass = GeoJSONFaultSection.class; - String faultName = "Kermadec"; - int faultID = 0; - String resourcePrefix = "/data/erf/nshm26/amsam/fault_models/subduction/"; - BufferedReader countoursIn = new BufferedReader(new InputStreamReader( - resourceClass.getResourceAsStream(resourcePrefix+"ker_slab2_dep_10km_contours.xyz"))); - CSVFile traceCSV = CSVFile.readStream( - resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_ker.csv"), true); - boolean smoothTraceForDDW = true; - String prefix = "ker_slab2"; - Range depthRange = Range.closed(0d, 60d); - Range lonFilter = null; - Range latFilter = Range.atLeast(-30d); - -// String faultName = "Izu-Bonin"; +// String faultName = "Kermadec"; // int faultID = 0; -// String resourcePrefix = "/data/erf/nshm26/gnmi/fault_models/subduction/"; +// String resourcePrefix = "/data/erf/nshm26/amsam/fault_models/subduction/"; // BufferedReader countoursIn = new BufferedReader(new InputStreamReader( -// GeoJSONFaultSection.class.getResourceAsStream(resourcePrefix+"izu_slab2_dep_10km_contours.xyz"))); +// resourceClass.getResourceAsStream(resourcePrefix+"ker_slab2_dep_10km_contours.xyz"))); // CSVFile traceCSV = CSVFile.readStream( -// resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_izu.csv"), true); +// resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_ker.csv"), true); // boolean smoothTraceForDDW = 
true; -// String prefix = "izu_slab2"; +// String prefix = "ker_slab2"; // Range depthRange = Range.closed(0d, 60d); // Range lonFilter = null; -// Range latFilter = Range.atMost(27d); +// Range latFilter = Range.atLeast(-30d); + + String faultName = "Izu-Bonin"; + int faultID = 0; + String resourcePrefix = "/data/erf/nshm26/gnmi/fault_models/subduction/"; + BufferedReader countoursIn = new BufferedReader(new InputStreamReader( + GeoJSONFaultSection.class.getResourceAsStream(resourcePrefix+"izu_slab2_dep_10km_contours.xyz"))); + CSVFile traceCSV = CSVFile.readStream( + resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_izu.csv"), true); + boolean smoothTraceForDDW = true; + String prefix = "izu_slab2"; + Range depthRange = Range.closed(0d, 60d); + Range lonFilter = null; + Range latFilter = Range.atMost(27d); File outputDir = new File("/tmp/"+prefix); Preconditions.checkArgument(outputDir.exists() || outputDir.mkdir()); @@ -367,10 +368,13 @@ public static void main(String[] args) throws IOException { + ", Mmin="+twoDF.format(magRange.getAverage())); } - mapMaker.setWriteGeoJSON(false); mapMaker.plot(outputDir, prefix+"_sub_sects", " "); - GeoJSONFaultReader.writeFaultSections(new File(outputDir, prefix+"_sub_sects.geojson"), allSects); + File geoJSONFile = new File(outputDir, prefix+"_sub_sects.geojson"); + + Files.move(geoJSONFile, new File(outputDir, prefix+"_sub_sects_map.geojson")); + + GeoJSONFaultReader.writeFaultSections(geoJSONFile, allSects); } private static List processContours(List contours, FaultTrace upper) { From b8141d69a58d66e19a7e8b7d4763240f0a485047 Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Wed, 11 Feb 2026 12:41:26 -0800 Subject: [PATCH 07/13] additional plots --- .../nshm26/DownDipInterfaceSubSectTests.java | 41 ++++++++++--------- .../nshm26/DownDipRupSetBuildingTests.java | 27 +++++++++++- 2 files changed, 48 insertions(+), 20 deletions(-) diff --git 
a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java index ed181193..cb739be8 100644 --- a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java @@ -48,33 +48,36 @@ public class DownDipInterfaceSubSectTests { public static void main(String[] args) throws IOException { Class resourceClass = GeoJSONFaultSection.class; -// String faultName = "Kermadec"; -// int faultID = 0; -// String resourcePrefix = "/data/erf/nshm26/amsam/fault_models/subduction/"; -// BufferedReader countoursIn = new BufferedReader(new InputStreamReader( -// resourceClass.getResourceAsStream(resourcePrefix+"ker_slab2_dep_10km_contours.xyz"))); -// CSVFile traceCSV = CSVFile.readStream( -// resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_ker.csv"), true); -// boolean smoothTraceForDDW = true; -// String prefix = "ker_slab2"; -// Range depthRange = Range.closed(0d, 60d); -// Range lonFilter = null; -// Range latFilter = Range.atLeast(-30d); + File baseOutputDir = new File("/home/kevin/OpenSHA/nshm26/down-dip-subsectioning"); + Preconditions.checkState(baseOutputDir.exists() || baseOutputDir.mkdir()); - String faultName = "Izu-Bonin"; + String faultName = "Kermadec"; int faultID = 0; - String resourcePrefix = "/data/erf/nshm26/gnmi/fault_models/subduction/"; + String resourcePrefix = "/data/erf/nshm26/amsam/fault_models/subduction/"; BufferedReader countoursIn = new BufferedReader(new InputStreamReader( - GeoJSONFaultSection.class.getResourceAsStream(resourcePrefix+"izu_slab2_dep_10km_contours.xyz"))); + resourceClass.getResourceAsStream(resourcePrefix+"ker_slab2_dep_10km_contours.xyz"))); CSVFile traceCSV = CSVFile.readStream( - resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_izu.csv"), true); + 
resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_ker.csv"), true); boolean smoothTraceForDDW = true; - String prefix = "izu_slab2"; + String prefix = "ker_slab2"; Range depthRange = Range.closed(0d, 60d); Range lonFilter = null; - Range latFilter = Range.atMost(27d); + Range latFilter = Range.atLeast(-30d); + +// String faultName = "Izu-Bonin"; +// int faultID = 0; +// String resourcePrefix = "/data/erf/nshm26/gnmi/fault_models/subduction/"; +// BufferedReader countoursIn = new BufferedReader(new InputStreamReader( +// GeoJSONFaultSection.class.getResourceAsStream(resourcePrefix+"izu_slab2_dep_10km_contours.xyz"))); +// CSVFile traceCSV = CSVFile.readStream( +// resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_izu.csv"), true); +// boolean smoothTraceForDDW = true; +// String prefix = "izu_slab2"; +// Range depthRange = Range.closed(0d, 60d); +// Range lonFilter = null; +// Range latFilter = Range.atMost(27d); - File outputDir = new File("/tmp/"+prefix); + File outputDir = new File(baseOutputDir, prefix); Preconditions.checkArgument(outputDir.exists() || outputDir.mkdir()); double traceSmoothDist = 200d; diff --git a/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java index 2604a05c..315a5cea 100644 --- a/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java @@ -40,10 +40,13 @@ public class DownDipRupSetBuildingTests { public static void main(String[] args) throws IOException { + File baseOutputDir = new File("/home/kevin/OpenSHA/nshm26/down-dip-subsectioning"); + Preconditions.checkState(baseOutputDir.exists() || baseOutputDir.mkdir()); + String prefix = "ker_slab2"; // String prefix = "izu_slab2"; - File inDir = new File("/tmp/"+prefix); + File inDir = new File(baseOutputDir, prefix); File subSectsFile = new File(inDir, 
prefix+"_sub_sects.geojson"); List sects = GeoJSONFaultReader.readFaultSections(subSectsFile); File outDir = new File(inDir, "rup_set_debug"); @@ -52,6 +55,7 @@ public static void main(String[] args) throws IOException { boolean doAnimation = true; boolean doSubSeisAnimation = false; boolean writeIndvFrames = true; + boolean doMiddleRowOverlaps = true; boolean buildRupSet = true; FaultSubsectionCluster fullCluster = new FaultSubsectionCluster(sects); @@ -108,6 +112,27 @@ public static void main(String[] args) throws IOException { int gifWidth = 800; double gifFPS = 5; + if (doMiddleRowOverlaps) { + // do middle row down-dip + File middleRowDir = new File(outDir, "middle_row_overlap_dd"); + Preconditions.checkArgument(middleRowDir.exists() || middleRowDir.mkdir()); + for (FaultSection sect : middleRow) { + mapMaker.plotSectScalars(s-> s == sect ? Double.NaN : neighborOverlapsDD.getOverlap(sect, s), + overlapCPT, "Fractional Overlap"); + + mapMaker.plot(middleRowDir, "col"+frameDF.format(sect.getSubSectionIndexAlong())+"_sect"+sect.getSectionId(), " "); + + if (sect.getSectionId() == 361 || sect.getSectionId() == 362) { + System.out.println("DD overlap debug from "+sect.getSectionName()); + for (FaultSection oSect : sects) { + double overlap = neighborOverlapsDD.getOverlap(sect, oSect); + if (overlap > 0) + System.out.println("\t"+oSect.getSectionName()+":\t"+overlap); + } + } + } + } + File animDir = new File(outDir, "animations"); Preconditions.checkArgument(!doAnimation || animDir.exists() || animDir.mkdir()); From 9c56ba53847a4d7aba9cd1f67c5bd76b6c7dd92b Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Mon, 2 Mar 2026 14:56:57 -0800 Subject: [PATCH 08/13] udpates for upstream --- .../earthquake/nshmp/model/NshmSurface.java | 14 ++ .../nshm23/NewCompoundSurfaceComparisons.java | 165 ++++++++++++++++++ .../nshm26/DownDipRupSetBuildingTests.java | 2 +- 3 files changed, 180 insertions(+), 1 deletion(-) create mode 100644 
src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSurface.java b/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSurface.java index f184f7c0..77eece09 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSurface.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSurface.java @@ -121,6 +121,20 @@ public Location getLastLocOnUpperEdge() { .getLastLocOnUpperEdge()); } + @Override + public Location getFirstLocOnLowerEdge() { + // this will only be asked for by OpenSHA CompoundSurface + gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface gridDelegate = (gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface) delegate; + return NshmUtil.toOpenShaLocation(gridDelegate.getLocation(gridDelegate.getNumRows()-1, 0)); + } + + @Override + public Location getLastLocOnLowerEdge() { + // this will only be asked for by OpenSHA CompoundSurface + gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface gridDelegate = (gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface) delegate; + return NshmUtil.toOpenShaLocation(gridDelegate.getLocation(gridDelegate.getNumRows()-1, gridDelegate.getNumCols()-1)); + } + // Caching @Override diff --git a/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java b/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java new file mode 100644 index 00000000..58764e57 --- /dev/null +++ b/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java @@ -0,0 +1,165 @@ +package scratch.kevin.nshm23; + +import java.awt.Color; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.opensha.commons.geo.GriddedRegion; +import org.opensha.commons.geo.Location; +import org.opensha.commons.geo.LocationList; +import org.opensha.commons.geo.LocationUtils; +import org.opensha.commons.geo.LocationUtils.LocationAverager; +import org.opensha.commons.geo.LocationVector; 
+import org.opensha.commons.gui.plot.GeographicMapMaker; +import org.opensha.commons.gui.plot.PlotCurveCharacterstics; +import org.opensha.commons.gui.plot.PlotLineType; +import org.opensha.commons.gui.plot.PlotSymbol; +import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files; +import org.opensha.commons.util.cpt.CPT; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader; +import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.FaultSection; +import org.opensha.sha.faultSurface.NewCompoundSurface; +import org.opensha.sha.faultSurface.RuptureSurface; + +import com.google.common.base.Preconditions; + +import net.mahdilamb.colormap.Colors; + +public class NewCompoundSurfaceComparisons { + + public static void main(String[] args) throws IOException { + FaultSystemSolution sol = FaultSystemSolution.load(new File("/home/kevin/OpenSHA/nshm23/batch_inversions/" + + "2024_02_02-nshm23_branches-WUS_FM_v3/results_WUS_FM_v3_branch_averaged.zip")); + FaultSystemRupSet rupSet = sol.getRupSet(); + + GriddedRegion sitesGrid = new GriddedRegion(NSHM23_RegionLoader.loadFullConterminousWUS(), 0.25, GriddedRegion.ANCHOR_0_0); + + System.out.println("Have "+sitesGrid.getNodeCount()+" sites"); + + CompoundSurface[] origSurfs = new CompoundSurface[rupSet.getNumRuptures()]; + NewCompoundSurface[] newSurfs = new NewCompoundSurface[rupSet.getNumRuptures()]; + + File outputDir = new File("/tmp/compound_surf_test"); + Preconditions.checkState(outputDir.exists() || outputDir.mkdir()); + + int numDebug = 0; + int maxNumDebug = 100; + + for (int r=0; r sects = rupSet.getFaultSectionDataForRupture(rupIndex); + GeographicMapMaker mapMaker = new GeographicMapMaker(sects); + mapMaker.setWriteGeoJSON(false); + mapMaker.setWritePDFs(false); + + + +// System.out.println("\tOrig dip: 
"+origSurf.getAveDip()); +// System.out.println("\tNew dip: "+newSurf.getAveDip()); +// System.out.println("\tOrig strike: "+origSurf.getAveStrike()); +// System.out.println("\tNew strike: "+newSurf.getAveStrike()); + + LocationList scatters = new LocationList(); + List scatterChars = new ArrayList<>(); + + scatters.add(origSurf.getFirstLocOnUpperEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_CIRCLE, 12f, Colors.tab_green)); + scatters.add(origSurf.getLastLocOnUpperEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_CIRCLE, 12f, Colors.tab_red)); + scatters.add(origSurf.getFirstLocOnLowerEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_CIRCLE, 10f, Colors.tab_green)); + scatters.add(origSurf.getLastLocOnLowerEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_CIRCLE, 10f, Colors.tab_red)); + scatters.add(newSurf.getFirstLocOnUpperEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_SQUARE, 7f, Colors.tab_green.darker().darker())); + scatters.add(newSurf.getLastLocOnUpperEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_SQUARE, 7f, Colors.tab_red.darker().darker())); + scatters.add(newSurf.getFirstLocOnLowerEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_SQUARE, 5f, Colors.tab_green.darker().darker())); + scatters.add(newSurf.getLastLocOnLowerEdge()); + scatterChars.add(new PlotCurveCharacterstics(PlotSymbol.FILLED_SQUARE, 5f, Colors.tab_red.darker().darker())); + + LocationList origUpper = origSurf.getUpperEdge(); + LocationList newUpper = newSurf.getUpperEdge(); + +// double strike = newSurf.getAveStrike(); +// CPT orderCPT = GMT_CPT_Files.RAINBOW_UNIFORM.instance(); +// orderCPT = orderCPT.rescale(0d, origUpper.size()-1d); +// LocationVector offset = new LocationVector(strike+90d, 2d, 0d); +// for (int i=0; i lines = new ArrayList<>(); + List lineChars = new ArrayList<>(); + lines.add(origUpper); + 
lineChars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 4f, Colors.tab_orange)); + lines.add(newUpper); + lineChars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 4f, Colors.tab_blue)); + lines.add(origUpper); + lineChars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 1f, Colors.tab_orange)); + mapMaker.plotLines(lines, lineChars); + + List arrows = new ArrayList<>(); + for (int s=1; s Date: Fri, 6 Mar 2026 17:33:11 -0800 Subject: [PATCH 09/13] for upstream --- .../kevin/nshm23/ClusterSubsetTests.java | 2 +- .../HardcodedInversionFactoryRunner.java | 29 ++++++++---- .../ruptures/PaperJumpCleanFigureGen.java | 2 +- .../segModelTests/MaxDistSegComparisons.java | 2 +- .../segModelTests/SegMFDsComparePageGen.java | 2 +- .../nshm26/DownDipInterfaceSubSectTests.java | 45 ++++++++++--------- .../nshm26/DownDipRupSetBuildingTests.java | 33 ++++++++------ .../kevin/nshm26/SlipProjectionTests.java | 4 +- 8 files changed, 72 insertions(+), 47 deletions(-) diff --git a/src/main/java/scratch/kevin/nshm23/ClusterSubsetTests.java b/src/main/java/scratch/kevin/nshm23/ClusterSubsetTests.java index 9a69beee..b45058e6 100644 --- a/src/main/java/scratch/kevin/nshm23/ClusterSubsetTests.java +++ b/src/main/java/scratch/kevin/nshm23/ClusterSubsetTests.java @@ -94,7 +94,7 @@ public static void main(String[] args) throws IOException { .forScalingRelationship(scale).build(); if (plausibility != null) rupSet.addModule(plausibility); - rupSet.addModule(ClusterRuptures.singleStranged(rupSet)); + rupSet.addModule(ClusterRuptures.singleStranded(rupSet)); } else { rupSet = ClusterRuptureBuilder.buildClusterRupSet(scale, rupSet.getFaultSectionDataList(), plausibility, cRups.getAll()); } diff --git a/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java b/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java index 7db01564..3da48d72 100644 --- a/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java +++ 
b/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java @@ -47,10 +47,13 @@ import org.opensha.sha.earthquake.rupForecastImpl.nshm23.prior2018.NSHM18_DeformationModels; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.prior2018.NSHM18_FaultModels; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.prior2018.NSHM18_LogicTreeBranch; +import org.opensha.sha.earthquake.rupForecastImpl.nshm26.NSHM26_InvConfigFactory; +import org.opensha.sha.earthquake.rupForecastImpl.nshm26.logicTree.NSHM26_LogicTree; import org.opensha.sha.earthquake.rupForecastImpl.prvi25.PRVI25_InvConfigFactory; import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_CrustalDeformationModels; import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_CrustalFaultModels; import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_LogicTree; +import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_SubductionBValues; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.magdist.IncrementalMagFreqDist; import org.opensha.sha.magdist.SparseGutenbergRichterSolver; @@ -133,14 +136,17 @@ public static void main(String[] args) throws IOException { // NSHM23_InvConfigFactory factory = new DefModSamplingEnabledInvConfig.ConnDistB0p5MidSegCorrCapSigma(); // dirName += "-nshm23-dm_sample_cap_sigma"; - PRVI25_InvConfigFactory factory = new PRVI25_InvConfigFactory(); - dirName += "-prvi25"; +// PRVI25_InvConfigFactory factory = new PRVI25_InvConfigFactory(); +// dirName += "-prvi25"; // PRVI25_InvConfigFactory factory = new PRVI25_InvConfigFactory.MueAsCrustal(); // dirName += "-prvi25-mue_as_crustal"; // PRVI25_InvConfigFactory factory = new PRVI25_InvConfigFactory.LimitCrustalBelowObserved_0p9(); // dirName += "-prvi25-limit_below_obs"; // PRVI25_InvConfigFactory.SUB_SECT_DDW_FRACT = 0.25; dirName += "-quarter_len_sub_sects"; + NSHM26_InvConfigFactory factory = new NSHM26_InvConfigFactory(); + 
dirName += "-nshm26"; + factory.setCacheDir(new File("/home/kevin/OpenSHA/nshm23/rup_sets/cache")); boolean writeRS = true; @@ -159,12 +165,19 @@ public static void main(String[] args) throws IOException { // branch.setValue(node); // writeGridProv = true; - LogicTreeBranch branch = new LogicTreeBranch<>(PRVI25_LogicTree.levelsSubductionCombined); - for (LogicTreeNode node : PRVI25_LogicTree.DEFAULT_SUBDUCTION_INTERFACE) - branch.setValue(node); - for (LogicTreeNode node : PRVI25_LogicTree.DEFAULT_SUBDUCTION_GRIDDED) - branch.setValue(node); - writeGridProv = true; +// LogicTreeBranch branch = new LogicTreeBranch<>(PRVI25_LogicTree.levelsSubductionCombined); +// for (LogicTreeNode node : PRVI25_LogicTree.DEFAULT_SUBDUCTION_INTERFACE) +// branch.setValue(node); +// for (LogicTreeNode node : PRVI25_LogicTree.DEFAULT_SUBDUCTION_GRIDDED) +// branch.setValue(node); +// writeGridProv = true; + + LogicTreeBranch branch = NSHM26_LogicTree.DEFAULT_GNMI_SUBDUCTION_INTERFACE; + dirName += "-gnmi"; +// LogicTreeBranch branch = NSHM26_LogicTree.DEFAULT_AMSAM_SUBDUCTION_INTERFACE; +// dirName += "-amsam"; + + branch.setValue(PRVI25_SubductionBValues.B_1p0); // branch.setValue(NSHM23_SegmentationModels.NONE); // branch.setValue(SupraSeisBValues.B_0p0); diff --git a/src/main/java/scratch/kevin/nshm23/ruptures/PaperJumpCleanFigureGen.java b/src/main/java/scratch/kevin/nshm23/ruptures/PaperJumpCleanFigureGen.java index abdb6659..99d41214 100644 --- a/src/main/java/scratch/kevin/nshm23/ruptures/PaperJumpCleanFigureGen.java +++ b/src/main/java/scratch/kevin/nshm23/ruptures/PaperJumpCleanFigureGen.java @@ -93,7 +93,7 @@ public static void main(String[] args) throws IOException { if (!rupSet.hasAvailableModule(ClusterRuptures.class)) { if (rupSet.getNumRuptures() == 253706) - rupSet.addModule(ClusterRuptures.singleStranged(rupSet)); + rupSet.addModule(ClusterRuptures.singleStranded(rupSet)); else rupSet.addAvailableModule(new Callable() { diff --git 
a/src/main/java/scratch/kevin/nshm23/segModelTests/MaxDistSegComparisons.java b/src/main/java/scratch/kevin/nshm23/segModelTests/MaxDistSegComparisons.java index cabcff31..35d76e25 100644 --- a/src/main/java/scratch/kevin/nshm23/segModelTests/MaxDistSegComparisons.java +++ b/src/main/java/scratch/kevin/nshm23/segModelTests/MaxDistSegComparisons.java @@ -38,7 +38,7 @@ public static void main(String[] args) throws IOException { + "results_FM3_1_CoulombRupSet_branch_averaged_reweight_r0_3.0.zip"); // + "node_branch_averaged/MaxDist_MaxDist3km.zip"); FaultSystemSolution sol = FaultSystemSolution.load(inputFile); - ClusterRuptures cRups = ClusterRuptures.singleStranged(sol.getRupSet()); + ClusterRuptures cRups = ClusterRuptures.singleStranded(sol.getRupSet()); PlausibilityConfiguration config = sol.getRupSet().getModule(PlausibilityConfiguration.class); ClusterConnectionStrategy connStrat = config.getConnectionStrategy(); SegmentationCalculator calc = new SegmentationCalculator(sol, cRups.getAll(), diff --git a/src/main/java/scratch/kevin/nshm23/segModelTests/SegMFDsComparePageGen.java b/src/main/java/scratch/kevin/nshm23/segModelTests/SegMFDsComparePageGen.java index 540b86a0..3064cdfb 100644 --- a/src/main/java/scratch/kevin/nshm23/segModelTests/SegMFDsComparePageGen.java +++ b/src/main/java/scratch/kevin/nshm23/segModelTests/SegMFDsComparePageGen.java @@ -331,7 +331,7 @@ public static void main(String[] args) throws IOException { } if (!rupSet.hasModule(ClusterRuptures.class)) - rupSet.addModule(ClusterRuptures.singleStranged(rupSet)); + rupSet.addModule(ClusterRuptures.singleStranded(rupSet)); ClusterRuptures cRups = rupSet.requireModule(ClusterRuptures.class); SolutionSlipRates primarySolSlips = primarySol.getModule(SolutionSlipRates.class); diff --git a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java index cb739be8..a0ade755 100644 --- 
a/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipInterfaceSubSectTests.java @@ -51,38 +51,39 @@ public static void main(String[] args) throws IOException { File baseOutputDir = new File("/home/kevin/OpenSHA/nshm26/down-dip-subsectioning"); Preconditions.checkState(baseOutputDir.exists() || baseOutputDir.mkdir()); - String faultName = "Kermadec"; - int faultID = 0; - String resourcePrefix = "/data/erf/nshm26/amsam/fault_models/subduction/"; - BufferedReader countoursIn = new BufferedReader(new InputStreamReader( - resourceClass.getResourceAsStream(resourcePrefix+"ker_slab2_dep_10km_contours.xyz"))); - CSVFile traceCSV = CSVFile.readStream( - resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_ker.csv"), true); - boolean smoothTraceForDDW = true; - String prefix = "ker_slab2"; - Range depthRange = Range.closed(0d, 60d); - Range lonFilter = null; - Range latFilter = Range.atLeast(-30d); - -// String faultName = "Izu-Bonin"; +// String faultName = "Kermadec"; // int faultID = 0; -// String resourcePrefix = "/data/erf/nshm26/gnmi/fault_models/subduction/"; +// String resourcePrefix = "/data/erf/nshm26/amsam/fault_models/subduction/"; // BufferedReader countoursIn = new BufferedReader(new InputStreamReader( -// GeoJSONFaultSection.class.getResourceAsStream(resourcePrefix+"izu_slab2_dep_10km_contours.xyz"))); +// resourceClass.getResourceAsStream(resourcePrefix+"ker_slab2_dep_10km_contours.xyz"))); // CSVFile traceCSV = CSVFile.readStream( -// resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_izu.csv"), true); +// resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_ker.csv"), true); // boolean smoothTraceForDDW = true; -// String prefix = "izu_slab2"; +// String prefix = "ker_slab2"; // Range depthRange = Range.closed(0d, 60d); // Range lonFilter = null; -// Range latFilter = Range.atMost(27d); +// Range latFilter = 
Range.atLeast(-30d); + + String faultName = "Izu-Bonin"; + int faultID = 0; + String resourcePrefix = "/data/erf/nshm26/gnmi/fault_models/subduction/"; + BufferedReader countoursIn = new BufferedReader(new InputStreamReader( + GeoJSONFaultSection.class.getResourceAsStream(resourcePrefix+"izu_slab2_dep_10km_contours.xyz"))); + CSVFile traceCSV = CSVFile.readStream( + resourceClass.getResourceAsStream(resourcePrefix+"trenches_usgs_2017_depths_izu.csv"), true); + boolean smoothTraceForDDW = true; + String prefix = "izu_slab2"; + Range depthRange = Range.closed(0d, 60d); + Range lonFilter = null; + Range latFilter = Range.atMost(27d); File outputDir = new File(baseOutputDir, prefix); Preconditions.checkArgument(outputDir.exists() || outputDir.mkdir()); double traceSmoothDist = 200d; - + double scaleLength = 20d; +// double scaleLength = 15d; boolean scaleIsMax = false; boolean constantCount = false; @@ -299,6 +300,7 @@ public static void main(String[] args) throws IOException { MinMaxAveTracker ddwRange = new MinMaxAveTracker(); MinMaxAveTracker areaRange = new MinMaxAveTracker(); MinMaxAveTracker magRange = new MinMaxAveTracker(); + MinMaxAveTracker slipAtMminRange = new MinMaxAveTracker(); MinMaxAveTracker dipRange = new MinMaxAveTracker(); MinMaxAveTracker perRowRange = new MinMaxAveTracker(); for (GeoJSONFaultSection[] row : subSects) { @@ -311,6 +313,7 @@ public static void main(String[] args) throws IOException { ddwRange.addValue(ddw); areaRange.addValue(area); magRange.addValue(PRVI25_SubductionScalingRelationships.LOGA_C4p0.getMag(area*1e6, length*1e3, ddw*1e3, ddw*1e3, 90d)); + slipAtMminRange.addValue(PRVI25_SubductionScalingRelationships.LOGA_C4p0.getAveSlip(area*1e6, length*1e3, ddw*1e3, ddw*1e3, 90d)); dipRange.addValue(sect.getAveDip()); allSects.add(sect); } @@ -346,6 +349,7 @@ public static void main(String[] args) throws IOException { System.out.println("Sub-Sect DDWs:\t"+ddwRange); System.out.println("Sub-Sect Areas:\t"+areaRange); 
System.out.println("Sub-Sect Mmin:\t"+magRange); + System.out.println("Sub-Sect slip at Mmin:\t"+slipAtMminRange); System.out.println("Sub-Sect Dip:\t"+dipRange); System.out.println("Sub-Sect Row counts:\t"+perRowRange); @@ -753,7 +757,6 @@ private static List loadDepthContours(BufferedReader in) throws IOEx double depth = -Double.parseDouble(tok.nextToken()); curTrace.add(new Location(lat, lon, depth));; } - Collections.reverse(ret); return ret; } diff --git a/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java index a09e7b43..62a691d8 100644 --- a/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java +++ b/src/main/java/scratch/kevin/nshm26/DownDipRupSetBuildingTests.java @@ -29,11 +29,13 @@ import org.opensha.sha.earthquake.faultSysSolution.ruptures.downDip.RectangularDownDipGrowingStrategy.NeighborOverlaps; import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.GeoJSONFaultReader; import org.opensha.sha.earthquake.faultSysSolution.util.FaultSysTools; +import org.opensha.sha.earthquake.rupForecastImpl.nshm26.logicTree.NSHM26_SubductionInterfaceFaultModels; import org.opensha.sha.earthquake.rupForecastImpl.prvi25.logicTree.PRVI25_SubductionScalingRelationships; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.GeoJSONFaultSection; import com.google.common.base.Preconditions; +import com.google.common.collect.Range; import net.mahdilamb.colormap.Colors; @@ -43,35 +45,40 @@ public static void main(String[] args) throws IOException { File baseOutputDir = new File("/home/kevin/OpenSHA/nshm26/down-dip-subsectioning"); Preconditions.checkState(baseOutputDir.exists() || baseOutputDir.mkdir()); - String prefix = "ker_slab2"; -// String prefix = "izu_slab2"; +// NSHM26_SubductionInterfaceFaultModels fm = NSHM26_SubductionInterfaceFaultModels.KERMADEC; +// String prefix = "ker_slab2"; + + NSHM26_SubductionInterfaceFaultModels fm = 
NSHM26_SubductionInterfaceFaultModels.MARIANA; + String prefix = "izu_slab2"; + + Range minSupraRange = Range.closed(10d, 40d); File inDir = new File(baseOutputDir, prefix); - File subSectsFile = new File(inDir, prefix+"_sub_sects.geojson"); - List sects = GeoJSONFaultReader.readFaultSections(subSectsFile); + List sects = fm.buildSubSects(fm); File outDir = new File(inDir, "rup_set_debug"); Preconditions.checkState(outDir.exists() || outDir.mkdir()); boolean doAnimation = true; boolean doSubSeisAnimation = false; boolean writeIndvFrames = true; - boolean doMiddleRowOverlaps = true; + boolean doMiddleRowOverlaps = false; boolean buildRupSet = true; FaultSubsectionCluster fullCluster = new FaultSubsectionCluster(sects); NeighborOverlaps neighborOverlaps = new NeighborOverlaps(fullCluster, false); NeighborOverlaps neighborOverlapsDD = new NeighborOverlaps(fullCluster, true); - List debugSects = new ArrayList<>(); + List debugSects = new ArrayList<>(); + debugSects.add(sects.get(276)); int numRows = sects.stream().mapToInt(s-> s.getSubSectionIndexDownDip()).max().getAsInt()+1; - List> rowColOrganized = new ArrayList<>(numRows); + List> rowColOrganized = new ArrayList<>(numRows); for (int i=0; i()); - for (GeoJSONFaultSection sect : sects) { + for (FaultSection sect : sects) { int row = sect.getSubSectionIndexDownDip(); int col = sect.getSubSectionIndexAlong(); - List rowSects = rowColOrganized.get(row); + List rowSects = rowColOrganized.get(row); while (rowSects.size() <= col) rowSects.add(null); rowSects.set(col, sect); @@ -80,14 +87,14 @@ public static void main(String[] args) throws IOException { System.out.println("Row "+row+" has "+rowColOrganized.get(row).size()+" sects"); // add corners - List topRow = rowColOrganized.get(0); - List bottomRow = rowColOrganized.get(numRows-1); + List topRow = rowColOrganized.get(0); + List bottomRow = rowColOrganized.get(numRows-1); debugSects.add(topRow.get(0)); debugSects.add(topRow.get(topRow.size()-1)); 
debugSects.add(bottomRow.get(0)); debugSects.add(bottomRow.get(bottomRow.size()-1)); - List middleRow = rowColOrganized.get(numRows/2); + List middleRow = rowColOrganized.get(numRows/2); debugSects.add(middleRow.get(0)); for (int i=1; i<5; i++) debugSects.add(middleRow.get((int)(middleRow.size() * i/5d))); @@ -105,7 +112,7 @@ public static void main(String[] args) throws IOException { Color participatingColor = overlapCPT.getMaxColor(); Color otherColor = overlapCPT.getMinColor(); - RectangularDownDipGrowingStrategy growingStrat = new RectangularDownDipGrowingStrategy(); + RectangularDownDipGrowingStrategy growingStrat = new RectangularDownDipGrowingStrategy(minSupraRange); RupSetScalingRelationship scale = PRVI25_SubductionScalingRelationships.LOGA_C4p0; HeadlessGraphPanel gp = PlotUtils.initScreenHeadless(); diff --git a/src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java b/src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java index 2335189e..dc8536c6 100644 --- a/src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java +++ b/src/main/java/scratch/kevin/nshm26/SlipProjectionTests.java @@ -16,10 +16,12 @@ public class SlipProjectionTests { public static void main(String[] args) throws IOException { + File baseOutputDir = new File("/home/kevin/OpenSHA/nshm26/down-dip-subsectioning"); + String prefix = "ker_slab2"; // String prefix = "izu_slab2"; - File inDir = new File("/tmp/"+prefix); + File inDir = new File(baseOutputDir, prefix); File subSectsFile = new File(inDir, prefix+"_sub_sects.geojson"); List sects = GeoJSONFaultReader.readFaultSections(subSectsFile); File outDir = new File(inDir, "slip_projection"); From 7dddeeab3f21ff146492c933a57fd67c97735546 Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Mon, 9 Mar 2026 13:00:43 -0700 Subject: [PATCH 10/13] initial gnmi/amsam test runs --- .../kevin/nshm23/HardcodedInversionFactoryRunner.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git 
a/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java b/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java index 3da48d72..86dfd98c 100644 --- a/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java +++ b/src/main/java/scratch/kevin/nshm23/HardcodedInversionFactoryRunner.java @@ -172,10 +172,10 @@ public static void main(String[] args) throws IOException { // branch.setValue(node); // writeGridProv = true; - LogicTreeBranch branch = NSHM26_LogicTree.DEFAULT_GNMI_SUBDUCTION_INTERFACE; - dirName += "-gnmi"; -// LogicTreeBranch branch = NSHM26_LogicTree.DEFAULT_AMSAM_SUBDUCTION_INTERFACE; -// dirName += "-amsam"; +// LogicTreeBranch branch = NSHM26_LogicTree.DEFAULT_GNMI_SUBDUCTION_INTERFACE; +// dirName += "-gnmi"; + LogicTreeBranch branch = NSHM26_LogicTree.DEFAULT_AMSAM_SUBDUCTION_INTERFACE; + dirName += "-amsam"; branch.setValue(PRVI25_SubductionBValues.B_1p0); From c991abd2ca1b5f7f157d34b91bf88af66bf1b8a2 Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Mon, 9 Mar 2026 13:00:51 -0700 Subject: [PATCH 11/13] alpha coulomb segmentation tests --- .../segmentation/RelativeCFFCalculator.java | 345 ++++++++++++++++++ .../kevin/segmentation/RelativeCFFTests.java | 94 +++++ .../segmentation/TestRelativeSegModels.java | 54 +++ 3 files changed, 493 insertions(+) create mode 100644 src/main/java/scratch/kevin/segmentation/RelativeCFFCalculator.java create mode 100644 src/main/java/scratch/kevin/segmentation/RelativeCFFTests.java create mode 100644 src/main/java/scratch/kevin/segmentation/TestRelativeSegModels.java diff --git a/src/main/java/scratch/kevin/segmentation/RelativeCFFCalculator.java b/src/main/java/scratch/kevin/segmentation/RelativeCFFCalculator.java new file mode 100644 index 00000000..c3b18834 --- /dev/null +++ b/src/main/java/scratch/kevin/segmentation/RelativeCFFCalculator.java @@ -0,0 +1,345 @@ +package scratch.kevin.segmentation; + +import java.io.File; +import java.io.IOException; +import 
java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.Supplier; + +import org.opensha.commons.geo.Location; +import org.opensha.commons.geo.LocationUtils; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; +import org.opensha.sha.earthquake.faultSysSolution.modules.ClusterRuptures; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.ClusterRupture; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.Jump; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.PlausibilityConfiguration; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.strategies.ClusterConnectionStrategy; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.util.RuptureTreeNavigator; +import org.opensha.sha.earthquake.faultSysSolution.util.FaultSysTools; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.NSHM23_InvConfigFactory; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_LogicTreeBranch; +import org.opensha.sha.faultSurface.FaultSection; +import org.opensha.sha.faultSurface.FaultTrace; +import org.opensha.sha.faultSurface.GeoJSONFaultSection; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator.PatchAlignment; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator.PatchLocation; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator.StiffnessDistribution; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator.StiffnessType; + +import com.google.common.base.Preconditions; + +public class RelativeCFFCalculator { + + 
private SubSectStiffnessCalculator stiffnessCalc; + + private Map cffRefRatios; + + public RelativeCFFCalculator(FaultSystemRupSet rupSet, SubSectStiffnessCalculator stiffnessCalc, int threads) { + this.stiffnessCalc = stiffnessCalc; + + ClusterRuptures cRups = rupSet.requireModule(ClusterRuptures.class); + + // for each jump, find the set of destination clusters on each side; keep those that are the largest or that + // terminate at subsequent jumps. + Map jumpDestinations = new HashMap<>(); + System.out.println("Detecting jump destinations for "+cRups.size()+" ruptures"); + for (int rupIndex=0; rupIndex(); + for (Jump jump : jumpDestinations.keySet()) { + DestinationClusters sources = jumpDestinations.get(jump.reverse()); + DestinationClusters destinations = jumpDestinations.get(jump); + double val; + try { + val = new JumpRelativeSupplier(jump, sources, destinations).get(); + } catch (Exception e) { + val = new JumpRelativeSupplier(jump, sources, destinations, true).get(); + } + cffRefRatios.put(jump, val); + } + } else { + ExecutorService exec = Executors.newFixedThreadPool(threads); + + System.out.println("Calculationg CFF reference ratios for "+jumpDestinations.size()+" jumps"); + Map> futures = new HashMap<>(jumpDestinations.size()); + for (Jump jump : jumpDestinations.keySet()) { + DestinationClusters sources = jumpDestinations.get(jump.reverse()); + DestinationClusters destinations = jumpDestinations.get(jump); + futures.put(jump, CompletableFuture.supplyAsync(new JumpRelativeSupplier(jump, sources, destinations), exec)); + } + + cffRefRatios = new HashMap<>(); + for (Jump jump : futures.keySet()) + cffRefRatios.put(jump, futures.get(jump).join()); + + exec.shutdown(); + } + } + + public double getRelativeCFF(Jump jump) { + Preconditions.checkState(cffRefRatios.containsKey(jump), "Unexpected jump: "+jump); + return cffRefRatios.get(jump); + } + + private static class DestinationClusters { + private FaultSubsectionCluster largestSingle; + private 
FaultSubsectionCluster largestForward; + private FaultSubsectionCluster largestBackward; + private Set intermediateClusters; + + public DestinationClusters() { + this.intermediateClusters = new HashSet<>(); + } + + public void add(FaultSubsectionCluster toCluster, FaultSection toSect, boolean isIntermediate) { + int toAlong = toSect.getSubSectionIndexAlong(); + Preconditions.checkState(toAlong >= 0, "Bad indexAlong=%s for %s", toAlong, toSect); + int minAlong = toAlong; + int maxAlong = toAlong; + List forwardSects = new ArrayList<>(); + List backwardSects = new ArrayList<>(); + for (FaultSection sect : toCluster.subSects) { + int along = sect.getSubSectionIndexAlong(); + if (along >= toAlong) { + forwardSects.add(sect); + maxAlong = Integer.max(maxAlong, along); + } + if (along <= toAlong) { + backwardSects.add(sect); + minAlong = Integer.min(minAlong, along); + } + } + if (minAlong == maxAlong) { + if (largestSingle == null || toCluster.subSects.size() > largestSingle.subSects.size()) + largestSingle = toCluster; + + if (isIntermediate) + intermediateClusters.add(toCluster); + } else { + if (!forwardSects.isEmpty()) { + FaultSubsectionCluster forwardCluster = forwardSects.size() == toCluster.subSects.size() ? + toCluster : new FaultSubsectionCluster(forwardSects); + if (largestForward == null || forwardSects.size() > largestForward.subSects.size()) + largestForward = forwardCluster; + + if (isIntermediate) + intermediateClusters.add(forwardCluster); + } + + if (!backwardSects.isEmpty()) { + FaultSubsectionCluster backwardCluster = backwardSects.size() == toCluster.subSects.size() ? 
+ toCluster : new FaultSubsectionCluster(backwardSects); + if (largestBackward == null || backwardSects.size() > largestBackward.subSects.size()) + largestBackward = backwardCluster; + + if (isIntermediate) + intermediateClusters.add(backwardCluster); + } + } + } + + public Set> getUnique() { + Set> uniques = new HashSet<>(); + if (intermediateClusters != null) + for (FaultSubsectionCluster cluster : intermediateClusters) + uniques.add(Set.copyOf(cluster.subSects)); + + if (largestForward == null && largestBackward == null && largestSingle != null) { + // only include single if we don't have a path in either direction + uniques.add(Set.copyOf(largestSingle.subSects)); + } else { + if (largestForward != null) + uniques.add(Set.copyOf(largestForward.subSects)); + if (largestBackward != null) + uniques.add(Set.copyOf(largestBackward.subSects)); + } + + return uniques; + } + } + + private class JumpRelativeSupplier implements Supplier { + + private Jump jump; + private DestinationClusters sources; + private DestinationClusters destinations; + + private final boolean D; + + public JumpRelativeSupplier(Jump jump, DestinationClusters sources, DestinationClusters destinations) { + this(jump, sources, destinations, false); + } + + public JumpRelativeSupplier(Jump jump, DestinationClusters sources, DestinationClusters destinations, boolean D) { + this.jump = jump; + this.sources = sources; + this.destinations = destinations; + this.D = D; + } + + @Override + public Double get() { + Set> sourcesSet = sources.getUnique(); + Set> destinationsSet = destinations.getUnique(); + + if (D) System.out.println("Calculating for jump "+jump+" with "+sourcesSet.size() + +" sourceSets and "+destinationsSet.size()+" destinationSets"); + + double best = 0d; + for (Set sources : sourcesSet) { + int minAlong = Integer.MAX_VALUE; + int maxAlong = Integer.MIN_VALUE; + for (FaultSection sect : sources) { + int along = sect.getSubSectionIndexAlong(); + minAlong = Integer.min(minAlong, along); + 
maxAlong = Integer.max(maxAlong, along); + } + int jumpAlong = jump.fromSection.getSubSectionIndexAlong(); + Preconditions.checkState(jumpAlong == minAlong || jumpAlong == maxAlong); + + List jumpEdgeSects = new ArrayList<>(1); + for (FaultSection sect : sources) + if (sect.getSubSectionIndexAlong() == jumpAlong) + jumpEdgeSects.add(sect); + + for (Set destinations : destinationsSet) { + // this propery accounts for down-dip sections + double len = FaultSystemRupSet.calculateLength(destinations) * 1e-3; // m -> km + + // figure out the best reference CFF value that is of the same length as this destination + if (D) System.out.println("\tFinding reference for destination of lengh "+(float)len+": "+destinations); + double bestRef = 0d; + for (boolean forward : new boolean[] {true,false}) { + if (minAlong != maxAlong) { + // see what direction we're going + boolean actualForward = jumpAlong > minAlong; + if (actualForward != forward) + continue; + } + if (D) System.out.println("\t\tBuilding source reference of lengh "+(float)len + +", forward="+forward+" for: "+sources+" with edges: "+jumpEdgeSects); + // extend the edge in a planar manner in the average strike direction of the subesction + // to get a reference CFF + List jumpToSects = new ArrayList<>(jumpEdgeSects.size()); + for (FaultSection edge : jumpEdgeSects) { + FaultTrace origTrace = edge.getFaultTrace(); + double direction = forward ? origTrace.getStrikeDirection() : 180d + origTrace.getStrikeDirection(); + Location edgeLoc = forward ? 
origTrace.last() : origTrace.first(); + GeoJSONFaultSection extension = new GeoJSONFaultSection.Builder( + 100000+edge.getSectionId(), edge.getName()+" extension", + FaultTrace.of(edgeLoc, LocationUtils.location(edgeLoc, Math.toRadians(direction), len))) + .upperDepth(edge.getOrigAveUpperDepth()) + .lowerDepth(edge.getAveLowerDepth()) + .dip(edge.getAveDip()).rake(edge.getAveRake()) + .dipDir(edge.getDipDirection()) + .aseismicity(edge.getAseismicSlipFactor()) + .build(); +// if (D) System.out.println("\t\t\tExtension trace: "+extension.getFaultTrace()); + jumpToSects.add(extension); + } + double refSum = 0d; + for (FaultSection source : sources) { + List sourcePatches = stiffnessCalc.getPatches(source); + for (FaultSection receiver : jumpToSects) { + List receiverPatches = stiffnessCalc.buildPatches(receiver); + double[] selfStiffness = null; + if (stiffnessCalc.getSelfStiffnessCap() > 0d) + selfStiffness = stiffnessCalc.calcSelfStiffness(receiverPatches); + StiffnessDistribution dist = stiffnessCalc.calcStiffnessDistribution(sourcePatches, receiverPatches, selfStiffness); + for (double[] vals : dist.get(StiffnessType.CFF)) + for (double val : vals) + refSum += val; + } + } + if (D) System.out.println("\t\t\tRef CFF="+(float)refSum); + if (refSum > bestRef) + bestRef = refSum; + } + + if (D) System.out.println("\t\tBestRef CFF="+(float)bestRef); + + double mySum = 0d; + for (FaultSection source : sources) { + for (FaultSection receiver : destinations) { + StiffnessDistribution dist = stiffnessCalc.calcStiffnessDistribution(source, receiver); + for (double[] vals : dist.get(StiffnessType.CFF)) + for (double val : vals) + mySum += val; + } + } + + if (D) System.out.println("\t\tMyCFF="+(float)mySum+";\tBestRefCFF="+(float)bestRef+";\tRatio="+(float)(mySum/bestRef)); + + Preconditions.checkState(bestRef > 0, "Couldn't find any positive reference CFF value: %s", bestRef); + best = Math.max(best, mySum/bestRef); + } + } + return best; + } + + } + +// private 
FaultSection buildExtension + + public static void main(String[] args) throws IOException { + NSHM23_InvConfigFactory factory = new NSHM23_InvConfigFactory(); + factory.setCacheDir(new File("/home/kevin/OpenSHA/nshm23/rup_sets/cache")); + + FaultSystemRupSet rs = factory.buildRuptureSet(NSHM23_LogicTreeBranch.DEFAULT_ON_FAULT, FaultSysTools.defaultNumThreads()); + ClusterConnectionStrategy connStrat = rs.requireModule(PlausibilityConfiguration.class).getConnectionStrategy(); + + SubSectStiffnessCalculator stiffnessCalc = new SubSectStiffnessCalculator( + rs.getFaultSectionDataList(), 2d, 3e4, 3e4, 0.5, PatchAlignment.FILL_OVERLAP, 1d); + + int threads = 1; +// int threads = FaultSysTools.defaultNumThreads(); + RelativeCFFCalculator calc = null; + try { + calc = new RelativeCFFCalculator(rs, stiffnessCalc, threads); + } catch (Exception e) { + e.printStackTrace(); + System.exit(0); + } + + int fromID = 1424; + for (Jump jump : connStrat.getJumpsFrom(rs.getFaultSectionData(fromID))) { + System.out.println(jump); + System.out.println("\tFROM:\t"+jump.fromCluster); + System.out.println("\tTO:\t"+jump.toCluster); + System.out.println("\tForward relCFF:\t"+(float)calc.getRelativeCFF(jump)); + System.out.println("\tReverse relCFF:\t"+(float)calc.getRelativeCFF(jump.reverse())); + } + } + +} diff --git a/src/main/java/scratch/kevin/segmentation/RelativeCFFTests.java b/src/main/java/scratch/kevin/segmentation/RelativeCFFTests.java new file mode 100644 index 00000000..165e6fae --- /dev/null +++ b/src/main/java/scratch/kevin/segmentation/RelativeCFFTests.java @@ -0,0 +1,94 @@ +package scratch.kevin.segmentation; + +import java.util.List; + +import org.opensha.commons.geo.Location; +import org.opensha.commons.geo.LocationUtils; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.impl.prob.Shaw07JumpDistProb; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_SegmentationModels; +import org.opensha.sha.faultSurface.FaultTrace; 
+import org.opensha.sha.faultSurface.GeoJSONFaultSection; +import org.opensha.sha.simulators.stiffness.AggregatedStiffnessCalculator; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator; +import org.opensha.sha.simulators.stiffness.AggregatedStiffnessCalculator.AggregationMethod; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator.PatchAlignment; +import org.opensha.sha.simulators.stiffness.SubSectStiffnessCalculator.StiffnessType; +import org.opensha.sha.util.FocalMech; + +public class RelativeCFFTests { + + public static void main(String[] args) { + Location origin = new Location(0d, 0d); + double len1 = 10d; +// FocalMech mech1 = FocalMech.STRIKE_SLIP; + FocalMech mech1 = FocalMech.REVERSE; + double strike1 = 0d; + +// FocalMech mech2 = FocalMech.STRIKE_SLIP; + FocalMech mech2 = FocalMech.REVERSE; + double jumpAngle = 0d; +// double strike2 = 0d; + double strike2 = 0d; + double distance = 1; +// double len2 = len1; + double len2 = 10d; + + double upper = 0d; + double lower = 10d; + + double stiffGridSpacing = 2d; + double coeffOfFriction = 0.5; +// double coeffOfFriction = 0.0; + double lameLambda = 3e4; + double lameMu = 3e4; + + Location end1 = LocationUtils.location(origin, Math.toRadians(strike1), len1); + + GeoJSONFaultSection sect1 = new GeoJSONFaultSection.Builder(1, "Source Fault", + FaultTrace.of(origin, end1)) + .upperDepth(upper).lowerDepth(lower) + .dip(mech1.dip()).rake(mech1.rake()).build(); + + Location start2 = LocationUtils.location(end1, Math.toRadians(jumpAngle), distance); + Location end2 = LocationUtils.location(start2, Math.toRadians(strike2), len2); + GeoJSONFaultSection sect2 = new GeoJSONFaultSection.Builder(2, "Destination Fault", + FaultTrace.of(start2, end2)) + .upperDepth(upper).lowerDepth(lower) + .dip(mech2.dip()).rake(mech2.rake()).build(); + + GeoJSONFaultSection sect2_equivRef = new GeoJSONFaultSection.Builder(1002, "Reference Destination Fault", + FaultTrace.of(end1, 
LocationUtils.location(end1, Math.toRadians(strike1), len2))) + .upperDepth(upper).lowerDepth(lower) + .dip(mech1.dip()).rake(mech1.rake()).build(); + + List sects = List.of(sect1, sect2, sect2_equivRef); + +// SubSectStiffnessCalculator calc = new SubSectStiffnessCalculator(null, distance, len2, upper, lower) + SubSectStiffnessCalculator stiffnessCalc = new SubSectStiffnessCalculator( + sects, stiffGridSpacing, lameLambda, lameMu, coeffOfFriction, PatchAlignment.FILL_OVERLAP, 1d); + AggregatedStiffnessCalculator sumAgg = new AggregatedStiffnessCalculator(StiffnessType.CFF, stiffnessCalc, true, + AggregationMethod.FLATTEN, AggregationMethod.SUM, AggregationMethod.SUM, AggregationMethod.SUM); + + Shaw07JumpDistProb[] segModels = { + NSHM23_SegmentationModels.HIGH.getShawModel(), + NSHM23_SegmentationModels.MID.getShawModel(), + NSHM23_SegmentationModels.LOW.getShawModel() + }; + Shaw07JumpDistProb vanillaSeg = new Shaw07JumpDistProb(1d, 3d); + + System.out.println("Distance: "+(float)distance+" km"); + + double refStiffness = sumAgg.calc(sect1, sect2_equivRef); + System.out.println("Ref stiffness: "+(float)refStiffness); + double sect2Stiffness = sumAgg.calc(sect1, sect2); + System.out.println("Actual stiffness: "+(float)sect2Stiffness); + System.out.println("Actual relative: "+(float)(sect2Stiffness/refStiffness)); + + System.out.print("NSHM23 segs:"); + for (Shaw07JumpDistProb seg : segModels) + System.out.print(" "+(float)seg.calcJumpProbability(distance)); + System.out.println(); + System.out.println("Vanilla ("+vanillaSeg+"): "+(float)vanillaSeg.calcJumpProbability(distance)); + } + +} diff --git a/src/main/java/scratch/kevin/segmentation/TestRelativeSegModels.java b/src/main/java/scratch/kevin/segmentation/TestRelativeSegModels.java new file mode 100644 index 00000000..8d1323cf --- /dev/null +++ b/src/main/java/scratch/kevin/segmentation/TestRelativeSegModels.java @@ -0,0 +1,54 @@ +package scratch.kevin.segmentation; + +import 
org.opensha.commons.logicTree.LogicTreeBranch; +import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; +import org.opensha.sha.earthquake.faultSysSolution.ruptures.plausibility.impl.prob.JumpProbabilityCalc; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_SegmentationModels; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.RupsThroughCreepingSectBranchNode; +import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.SegmentationModelBranchNode; + +public enum TestRelativeSegModels implements SegmentationModelBranchNode, RupsThroughCreepingSectBranchNode { + RELATIVE_CFF_MIDDLE("Relative (Middle)", NSHM23_SegmentationModels.MID); + + private String name; + private NSHM23_SegmentationModels segModel; + + private TestRelativeSegModels(String name, NSHM23_SegmentationModels segModel) { + this.name = name; + this.segModel = segModel; + + } + + @Override + public double getNodeWeight(LogicTreeBranch fullBranch) { + return 1; + } + + @Override + public String getFilePrefix() { + return name(); + } + + @Override + public String getShortName() { + return name; + } + + @Override + public String getName() { + return name; + } + + @Override + public boolean isExcludeRupturesThroughCreepingSect() { + return segModel.isExcludeRupturesThroughCreepingSect(); + } + + @Override + public JumpProbabilityCalc getModel(FaultSystemRupSet rupSet, LogicTreeBranch branch) { + JumpProbabilityCalc upstreamModel = NSHM23_SegmentationModels.buildModel(rupSet, Double.NaN, Double.NaN, + segModel.getWasatchPassthrough(), segModel.getCreepingSectPassthrough(), Double.NaN, false); + return null; + } + +} From 00d386f289e6a97ecf0dddaf41d4bbc07aef6544 Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Mon, 9 Mar 2026 14:29:21 -0700 Subject: [PATCH 12/13] renamed and deprecated the old CompoundSurface implementation --- .../earthquake/nshmp/model/NshmSource.java | 4 +- .../BayAreaRegionalGroundMotionCalc.java | 10 +- 
.../java/scratch/kevin/JorgeShakeMapCalc.java | 4 +- .../nshm23/NewCompoundSurfaceComparisons.java | 102 ++++++++++++++---- ...gleSiteHazardAndDataComparisonPageGen.java | 10 +- .../nshm23/figures/Regional_MFD_Plots.java | 6 +- .../simulators/erf/RSQSimSectBundledERF.java | 6 +- .../scratch/kevin/ucerf3/CatalogWriter.java | 6 +- .../kevin/ucerf3/CoulombFileParser.java | 4 +- .../kevin/ucerf3/QuadToEvenlyGridded.java | 4 +- .../kevin/ucerf3/UCERF3_IM_Calculator.java | 6 +- .../kevin/ucerf3/etas/MapStorageCalcs.java | 4 +- .../ucerf3/inversion/ERFInsideSpeedTest.java | 4 +- .../java/scratch/ned/nshm23/MiscPlots.java | 4 +- 14 files changed, 117 insertions(+), 57 deletions(-) diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java index 51ee59cd..8663f4ce 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java @@ -9,7 +9,7 @@ import org.opensha.commons.geo.LocationUtils; import org.opensha.sha.earthquake.ProbEqkRupture; import org.opensha.sha.earthquake.ProbEqkSource; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.RuptureSurface; import gov.usgs.earthquake.nshmp.Maths; @@ -149,7 +149,7 @@ static class System extends NshmSource { delegate.rate(), weight, duration, - new CompoundSurface(surfaces)); + new OldCompoundSurface(surfaces)); } @Override diff --git a/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java b/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java index c2696734..2d073d5e 100644 --- a/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java +++ b/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java @@ -78,7 +78,7 @@ import org.opensha.sha.earthquake.util.GridCellSupersamplingSettings; import 
org.opensha.sha.earthquake.util.GriddedSeismicitySettings; import org.opensha.sha.faultSurface.ApproxEvenlyGriddedSurface; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.FaultTrace; import org.opensha.sha.faultSurface.GriddedSurfaceImpl; import org.opensha.sha.faultSurface.PointSurface; @@ -659,8 +659,8 @@ public static void main(String[] args) throws IOException { RuptureSurface fullSurf = rup.getRuptureSurface(); BitSet hypoBitSet = new BitSet(sitesAtNodes.size()); List surfsToCheck; - if (fullSurf instanceof CompoundSurface) { - surfsToCheck = ((CompoundSurface)fullSurf).getSurfaceList(); + if (fullSurf instanceof OldCompoundSurface) { + surfsToCheck = ((OldCompoundSurface)fullSurf).getSurfaceList(); } else { surfsToCheck = List.of(fullSurf); } @@ -1095,7 +1095,7 @@ public List call() throws Exception { List rups = new ArrayList<>(events.size()); for (ProbEqkRupture rup : events) { RuptureSurface surf = rup.getRuptureSurface(); - if (surf instanceof CompoundSurface) { + if (surf instanceof OldCompoundSurface) { ProbEqkRupture origRup = rup; RuptureSurface wrappedSurf = DistCachedERFWrapper.getWrappedSurface(wrappedMap, surf); rup = new DistCacheWrapperRupture(rup, wrappedSurf); @@ -1184,7 +1184,7 @@ public EpiCalcCallable call() throws Exception { Preconditions.checkNotNull(sites); RuptureSurface surf = event.getRuptureSurface(); - if (surf instanceof CompoundSurface) { + if (surf instanceof OldCompoundSurface) { RuptureSurface wrappedSurf = DistCachedERFWrapper.getWrappedSurface(wrappedMap, surf); event = new DistCacheWrapperRupture(event, wrappedSurf); } diff --git a/src/main/java/scratch/kevin/JorgeShakeMapCalc.java b/src/main/java/scratch/kevin/JorgeShakeMapCalc.java index a1b3cb56..59211662 100644 --- a/src/main/java/scratch/kevin/JorgeShakeMapCalc.java +++ b/src/main/java/scratch/kevin/JorgeShakeMapCalc.java @@ -24,7 +24,7 @@ import 
org.opensha.sha.calc.ScenarioShakeMapCalculator; import org.opensha.sha.earthquake.ProbEqkRupture; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; @@ -213,7 +213,7 @@ public static void main(String[] args) throws IOException, DocumentException, GM map.setCustomScaleMin((double)myCPT.getMinValue()); map.setCustomScaleMax((double)myCPT.getMaxValue()); map.setCustomLabel(label); - CompoundSurface surf = (CompoundSurface)rup.getRuptureSurface(); + OldCompoundSurface surf = (OldCompoundSurface)rup.getRuptureSurface(); for (RuptureSurface gridSurf : surf.getSurfaceList()) GMT_MapGeneratorForShakeMaps.addRupture(map, (EvenlyGriddedSurface)gridSurf, rup.getHypocenterLocation(), GMT_MapGeneratorForShakeMaps.RUP_PLOT_PARAM_PERIMETER); diff --git a/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java b/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java index 58764e57..e5e66abe 100644 --- a/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java +++ b/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java @@ -1,47 +1,54 @@ package scratch.kevin.nshm23; -import java.awt.Color; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import org.apache.commons.math3.util.Precision; import org.opensha.commons.geo.GriddedRegion; import org.opensha.commons.geo.Location; import org.opensha.commons.geo.LocationList; import org.opensha.commons.geo.LocationUtils; import org.opensha.commons.geo.LocationUtils.LocationAverager; -import org.opensha.commons.geo.LocationVector; import 
org.opensha.commons.gui.plot.GeographicMapMaker; import org.opensha.commons.gui.plot.PlotCurveCharacterstics; import org.opensha.commons.gui.plot.PlotLineType; import org.opensha.commons.gui.plot.PlotSymbol; -import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files; -import org.opensha.commons.util.cpt.CPT; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.NewCompoundSurface; import org.opensha.sha.faultSurface.RuptureSurface; +import org.opensha.sha.faultSurface.cache.SurfaceCachingPolicy; +import org.opensha.sha.faultSurface.cache.SurfaceDistances; +import org.opensha.sha.faultSurface.cache.SurfaceCachingPolicy.CacheTypes; import com.google.common.base.Preconditions; +import com.google.common.base.Stopwatch; import net.mahdilamb.colormap.Colors; public class NewCompoundSurfaceComparisons { public static void main(String[] args) throws IOException { + SurfaceCachingPolicy.force(CacheTypes.THREAD_LOCAL); FaultSystemSolution sol = FaultSystemSolution.load(new File("/home/kevin/OpenSHA/nshm23/batch_inversions/" + "2024_02_02-nshm23_branches-WUS_FM_v3/results_WUS_FM_v3_branch_averaged.zip")); FaultSystemRupSet rupSet = sol.getRupSet(); + surfBuildTime(rupSet, 50); + System.exit(0); + GriddedRegion sitesGrid = new GriddedRegion(NSHM23_RegionLoader.loadFullConterminousWUS(), 0.25, GriddedRegion.ANCHOR_0_0); System.out.println("Have "+sitesGrid.getNodeCount()+" sites"); - CompoundSurface[] origSurfs = new CompoundSurface[rupSet.getNumRuptures()]; + OldCompoundSurface[] origSurfs = new OldCompoundSurface[rupSet.getNumRuptures()]; NewCompoundSurface[] newSurfs = new 
NewCompoundSurface[rupSet.getNumRuptures()]; File outputDir = new File("/tmp/compound_surf_test"); @@ -53,20 +60,30 @@ public static void main(String[] args) throws IOException { for (int r=0; r> futures = new ArrayList<>(); + for (Location loc : sitesGrid.getNodeList()) { + futures.add(CompletableFuture.runAsync(() -> { + for (int r=0; r future = futures.get(i); + future.join(); + System.out.print("."); + if (i > 0 && i % 100 == 0) + System.out.println(" "+i+"/"+futures.size()); + } + System.out.println(" DONE "+futures.size()); } private static void debugSurf(File outputDir, FaultSystemRupSet rupSet, int rupIndex, - CompoundSurface origSurf, NewCompoundSurface newSurf) throws IOException { + OldCompoundSurface origSurf, NewCompoundSurface newSurf) throws IOException { List sects = rupSet.getFaultSectionDataForRupture(rupIndex); GeographicMapMaker mapMaker = new GeographicMapMaker(sects); mapMaker.setWriteGeoJSON(false); @@ -155,6 +200,21 @@ private static void debugSurf(File outputDir, FaultSystemRupSet rupSet, int rupI mapMaker.plot(outputDir, "rupture_"+rupIndex, " "); } + private static void surfBuildTime(FaultSystemRupSet rupSet, int trials) { + Stopwatch watch = Stopwatch.createUnstarted(); + for (int i=0; i fullIndexes = new ArrayList<>(); - if (surf instanceof CompoundSurface) { - List subSurfs = ((CompoundSurface)surf).getSurfaceList(); + if (surf instanceof OldCompoundSurface) { + List subSurfs = ((OldCompoundSurface)surf).getSurfaceList(); for (RuptureSurface subSurf : subSurfs) { int[] indexes; indexes = mappedIndexes.get(subSurf); @@ -1049,8 +1049,8 @@ static boolean canSkipNshmSurf(Region reg, RuptureSurface surf) { return false; } return !reg.contains(centroid); - } else if (surf instanceof CompoundSurface) { - for (RuptureSurface subSurf : ((CompoundSurface)surf).getSurfaceList()) { + } else if (surf instanceof OldCompoundSurface) { + for (RuptureSurface subSurf : ((OldCompoundSurface)surf).getSurfaceList()) { if (!canSkipNshmSurf(reg, subSurf)) // 
at least one is false, return false return false; diff --git a/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java b/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java index c9736301..ef9ed1a4 100644 --- a/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java +++ b/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java @@ -61,7 +61,7 @@ import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.NSHM23_BaseRegion; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.SeismicityRegions; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.StitchedRegions; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.magdist.IncrementalMagFreqDist; import org.opensha.sha.util.TectonicRegionType; @@ -943,8 +943,8 @@ public List call() throws Exception { RuptureSurface surf = rup.getRuptureSurface(); int[] countsInside = new int[regions.length]; int count = 0; - if (surf instanceof CompoundSurface) { - for (RuptureSurface subSurf : ((CompoundSurface)surf).getSurfaceList()) { + if (surf instanceof OldCompoundSurface) { + for (RuptureSurface subSurf : ((OldCompoundSurface)surf).getSurfaceList()) { SurfInRegionsResult cached = subSurfInsideCache.get(subSurf); if (cached == null) { int subCount = 0; diff --git a/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java b/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java index 299ba9ab..6b3a154c 100644 --- a/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java +++ b/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java @@ -49,7 +49,7 @@ import org.opensha.sha.earthquake.ProbEqkSource; import org.opensha.sha.earthquake.faultSysSolution.RupSetDeformationModel; import 
org.opensha.sha.earthquake.faultSysSolution.RupSetFaultModel; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.simulators.EventRecord; @@ -491,7 +491,7 @@ private static RuptureSurface buildSubSectSurface(List sortedRupSe if (rupSurfs.size() == 1) surf = rupSurfs.get(0); else - surf = new CompoundSurface(rupSurfs); + surf = new OldCompoundSurface(rupSurfs); return surf; } @@ -756,7 +756,7 @@ public synchronized boolean isWithinBuffer(Location loc, Collection par if (surfs.size() == 1) compound = surfs.get(0); else - compound = new CompoundSurface(surfs); + compound = new OldCompoundSurface(surfs); LocationList trace = compound.getEvenlyDiscritizedUpperEdge(); // this can have duplicates in it, remove those for (int p=trace.size(); --p>0;) { diff --git a/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java b/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java index a92ccbb7..a06b5b0b 100644 --- a/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java +++ b/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java @@ -15,7 +15,7 @@ import org.opensha.sha.earthquake.observedEarthquake.ObsEqkRupture; import org.opensha.sha.earthquake.observedEarthquake.parsers.UCERF3_CatalogParser; import org.opensha.sha.earthquake.observedEarthquake.parsers.ngaWest.NGAWestParser; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import org.opensha.sha.faultSurface.RuptureSurface; @@ -30,8 +30,8 @@ protected static void writeFiniteSurfaceFile(ObsEqkRupture rup, File file, doubl protected static void writeFiniteSurfaceFile(RuptureSurface rupSurf, File file, double distCutoff) throws IOException { ArrayList surfs = new ArrayList(); - if (rupSurf instanceof CompoundSurface) - 
surfs.addAll(((CompoundSurface)rupSurf).getSurfaceList()); + if (rupSurf instanceof OldCompoundSurface) + surfs.addAll(((OldCompoundSurface)rupSurf).getSurfaceList()); else surfs.add(rupSurf); diff --git a/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java b/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java index e2ac8ebe..13a2a85e 100644 --- a/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java +++ b/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java @@ -15,7 +15,7 @@ import org.opensha.commons.util.FileUtils; import org.opensha.sha.earthquake.observedEarthquake.ObsEqkRupList; import org.opensha.sha.earthquake.observedEarthquake.ObsEqkRupture; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import org.opensha.sha.faultSurface.FaultTrace; import org.opensha.sha.faultSurface.RuptureSurface; @@ -116,7 +116,7 @@ public static RuptureSurface loadFaultSurface(File file, double gridSpacing) thr if (surfs.size() == 1) return surfs.get(0); - return new CompoundSurface(surfs); + return new OldCompoundSurface(surfs); } private static Location getRelativeLocation(Location origin, double xOffset, double yOffset) { diff --git a/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java b/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java index 57cae597..bb1be992 100644 --- a/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java +++ b/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java @@ -11,7 +11,7 @@ import org.opensha.commons.geo.LocationVector; import org.opensha.sha.earthquake.observedEarthquake.parsers.ngaWest.NGAWestParser; import org.opensha.sha.faultSurface.ApproxEvenlyGriddedSurface; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import org.opensha.sha.faultSurface.FaultTrace; 
import org.opensha.sha.faultSurface.GriddedSubsetSurface; @@ -200,7 +200,7 @@ public static void main(String[] args) throws IOException { } surfs.add(gridded); } else { - for (RuptureSurface rupSurf : ((CompoundSurface)surf).getSurfaceList()) { + for (RuptureSurface rupSurf : ((OldCompoundSurface)surf).getSurfaceList()) { EvenlyGriddedSurface gridded = (EvenlyGriddedSurface)rupSurf; if (gridded.getNumCols() < 2 || gridded.getNumRows() < 2) { System.out.println("Weird size: "+gridded.getNumRows()+"x"+gridded.getNumCols()); diff --git a/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java b/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java index 43b68fad..859d9b9a 100644 --- a/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java +++ b/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java @@ -28,7 +28,7 @@ import org.opensha.sha.earthquake.faultSysSolution.modules.GridSourceProvider; import org.opensha.sha.earthquake.param.IncludeBackgroundOption; import org.opensha.sha.earthquake.param.IncludeBackgroundParam; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.FaultTrace; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.imr.AttenRelRef; @@ -324,10 +324,10 @@ private List pruneDuplicates(List locs) { } private RuptureSurface closestSubSurf(RuptureSurface surf, Location loc) { - if (surf instanceof CompoundSurface) { + if (surf instanceof OldCompoundSurface) { double minDist = Double.POSITIVE_INFINITY; RuptureSurface closest = null; - for (RuptureSurface subSurf : ((CompoundSurface)surf).getSurfaceList()) { + for (RuptureSurface subSurf : ((OldCompoundSurface)surf).getSurfaceList()) { double dist = subSurf.getDistanceRup(loc); if (dist < minDist) { minDist = dist; diff --git a/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java b/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java index 
f7a7a095..91250546 100644 --- a/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java +++ b/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java @@ -14,7 +14,7 @@ import org.opensha.commons.geo.Region; import org.opensha.refFaultParamDb.vo.FaultSectionPrefData; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; @@ -132,7 +132,7 @@ public static void main(String[] args) throws IOException, DocumentException { if (surfs.size() == 1) compound = surfs.get(0); else - compound = new CompoundSurface(surfs); + compound = new OldCompoundSurface(surfs); LocationList trace = compound.getEvenlyDiscritizedUpperEdge(); // this can have duplicates in it, remove those for (int p=trace.size(); --p>0;) { diff --git a/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java b/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java index 4f43ce5a..24465920 100644 --- a/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java +++ b/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java @@ -13,7 +13,7 @@ import org.opensha.sha.earthquake.EqkRupture; import org.opensha.sha.earthquake.ProbEqkSource; import org.opensha.sha.earthquake.calc.ERF_Calculator; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.RupInRegionCache; import org.opensha.sha.faultSurface.RuptureSurface; @@ -56,7 +56,7 @@ public static void main(String[] args) throws ZipException, IOException { public boolean isRupInRegion(ERF erf, ProbEqkSource src, EqkRupture rup, int srcIndex, int rupIndex, Region region) { RuptureSurface surf = rup.getRuptureSurface(); - if (surf instanceof CompoundSurface) { + if (surf instanceof 
OldCompoundSurface) { ConcurrentMap regMap = map.get(region); if (regMap == null) { regMap = Maps.newConcurrentMap(); diff --git a/src/main/java/scratch/ned/nshm23/MiscPlots.java b/src/main/java/scratch/ned/nshm23/MiscPlots.java index 763475bb..c4a2eaa5 100644 --- a/src/main/java/scratch/ned/nshm23/MiscPlots.java +++ b/src/main/java/scratch/ned/nshm23/MiscPlots.java @@ -34,7 +34,7 @@ import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_ScalingRelationships_StableContinental; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.AnalysisRegions; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.SeismicityRegions; -import org.opensha.sha.faultSurface.CompoundSurface; +import org.opensha.sha.faultSurface.OldCompoundSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.magdist.GutenbergRichterMagFreqDist; @@ -921,7 +921,7 @@ public static SummedMagFreqDist OLDgetMagFreqDistInRegion(NshmErf erf, Region re } } else { // it's a CompoundSurface - CompoundSurface compSurf = (CompoundSurface) surf; + OldCompoundSurface compSurf = (OldCompoundSurface) surf; List surfList = compSurf.getSurfaceList(); double ptRate = equivRate/surfList.size(); for(RuptureSurface surface: surfList) { From 53f65f1fd3f72f311022d7c0926c2f610f11414d Mon Sep 17 00:00:00 2001 From: Kevin Milner Date: Mon, 9 Mar 2026 15:00:35 -0700 Subject: [PATCH 13/13] removed old compound surface implementation --- .../earthquake/nshmp/model/NshmSource.java | 4 +- .../BayAreaRegionalGroundMotionCalc.java | 10 ++--- .../java/scratch/kevin/JorgeShakeMapCalc.java | 4 +- .../nshm23/NewCompoundSurfaceComparisons.java | 38 ++++++++----------- ...gleSiteHazardAndDataComparisonPageGen.java | 10 ++--- .../nshm23/figures/Regional_MFD_Plots.java | 6 +-- .../simulators/erf/RSQSimSectBundledERF.java | 6 +-- .../scratch/kevin/ucerf3/CatalogWriter.java | 6 +-- 
.../kevin/ucerf3/CoulombFileParser.java | 4 +- .../kevin/ucerf3/QuadToEvenlyGridded.java | 4 +- .../kevin/ucerf3/UCERF3_IM_Calculator.java | 6 +-- .../kevin/ucerf3/etas/MapStorageCalcs.java | 4 +- .../ucerf3/inversion/ERFInsideSpeedTest.java | 4 +- .../java/scratch/ned/nshm23/MiscPlots.java | 4 +- 14 files changed, 51 insertions(+), 59 deletions(-) diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java index 8663f4ce..171ac9ce 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/NshmSource.java @@ -9,7 +9,7 @@ import org.opensha.commons.geo.LocationUtils; import org.opensha.sha.earthquake.ProbEqkRupture; import org.opensha.sha.earthquake.ProbEqkSource; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.RuptureSurface; import gov.usgs.earthquake.nshmp.Maths; @@ -149,7 +149,7 @@ static class System extends NshmSource { delegate.rate(), weight, duration, - new OldCompoundSurface(surfaces)); + CompoundSurface.get(surfaces)); } @Override diff --git a/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java b/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java index 2d073d5e..c2696734 100644 --- a/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java +++ b/src/main/java/scratch/kevin/BayAreaRegionalGroundMotionCalc.java @@ -78,7 +78,7 @@ import org.opensha.sha.earthquake.util.GridCellSupersamplingSettings; import org.opensha.sha.earthquake.util.GriddedSeismicitySettings; import org.opensha.sha.faultSurface.ApproxEvenlyGriddedSurface; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.FaultTrace; import org.opensha.sha.faultSurface.GriddedSurfaceImpl; import 
org.opensha.sha.faultSurface.PointSurface; @@ -659,8 +659,8 @@ public static void main(String[] args) throws IOException { RuptureSurface fullSurf = rup.getRuptureSurface(); BitSet hypoBitSet = new BitSet(sitesAtNodes.size()); List surfsToCheck; - if (fullSurf instanceof OldCompoundSurface) { - surfsToCheck = ((OldCompoundSurface)fullSurf).getSurfaceList(); + if (fullSurf instanceof CompoundSurface) { + surfsToCheck = ((CompoundSurface)fullSurf).getSurfaceList(); } else { surfsToCheck = List.of(fullSurf); } @@ -1095,7 +1095,7 @@ public List call() throws Exception { List rups = new ArrayList<>(events.size()); for (ProbEqkRupture rup : events) { RuptureSurface surf = rup.getRuptureSurface(); - if (surf instanceof OldCompoundSurface) { + if (surf instanceof CompoundSurface) { ProbEqkRupture origRup = rup; RuptureSurface wrappedSurf = DistCachedERFWrapper.getWrappedSurface(wrappedMap, surf); rup = new DistCacheWrapperRupture(rup, wrappedSurf); @@ -1184,7 +1184,7 @@ public EpiCalcCallable call() throws Exception { Preconditions.checkNotNull(sites); RuptureSurface surf = event.getRuptureSurface(); - if (surf instanceof OldCompoundSurface) { + if (surf instanceof CompoundSurface) { RuptureSurface wrappedSurf = DistCachedERFWrapper.getWrappedSurface(wrappedMap, surf); event = new DistCacheWrapperRupture(event, wrappedSurf); } diff --git a/src/main/java/scratch/kevin/JorgeShakeMapCalc.java b/src/main/java/scratch/kevin/JorgeShakeMapCalc.java index 59211662..a1b3cb56 100644 --- a/src/main/java/scratch/kevin/JorgeShakeMapCalc.java +++ b/src/main/java/scratch/kevin/JorgeShakeMapCalc.java @@ -24,7 +24,7 @@ import org.opensha.sha.calc.ScenarioShakeMapCalculator; import org.opensha.sha.earthquake.ProbEqkRupture; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import 
org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; @@ -213,7 +213,7 @@ public static void main(String[] args) throws IOException, DocumentException, GM map.setCustomScaleMin((double)myCPT.getMinValue()); map.setCustomScaleMax((double)myCPT.getMaxValue()); map.setCustomLabel(label); - OldCompoundSurface surf = (OldCompoundSurface)rup.getRuptureSurface(); + CompoundSurface surf = (CompoundSurface)rup.getRuptureSurface(); for (RuptureSurface gridSurf : surf.getSurfaceList()) GMT_MapGeneratorForShakeMaps.addRupture(map, (EvenlyGriddedSurface)gridSurf, rup.getHypocenterLocation(), GMT_MapGeneratorForShakeMaps.RUP_PLOT_PARAM_PERIMETER); diff --git a/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java b/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java index e5e66abe..828bc6d3 100644 --- a/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java +++ b/src/main/java/scratch/kevin/nshm23/NewCompoundSurfaceComparisons.java @@ -20,9 +20,8 @@ import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemSolution; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.FaultSection; -import org.opensha.sha.faultSurface.NewCompoundSurface; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.faultSurface.cache.SurfaceCachingPolicy; import org.opensha.sha.faultSurface.cache.SurfaceDistances; @@ -48,8 +47,8 @@ public static void main(String[] args) throws IOException { System.out.println("Have "+sitesGrid.getNodeCount()+" sites"); - OldCompoundSurface[] origSurfs = new OldCompoundSurface[rupSet.getNumRuptures()]; - NewCompoundSurface[] newSurfs = new NewCompoundSurface[rupSet.getNumRuptures()]; + CompoundSurface[] origSurfs = 
new CompoundSurface[rupSet.getNumRuptures()]; + CompoundSurface[] newSurfs = new CompoundSurface[rupSet.getNumRuptures()]; File outputDir = new File("/tmp/compound_surf_test"); Preconditions.checkState(outputDir.exists() || outputDir.mkdir()); @@ -61,25 +60,18 @@ public static void main(String[] args) throws IOException { if (r < 10000 && r % 1000 == 0 || r % 10000 == 0) System.out.println("Processing rupture "+r); RuptureSurface surf = rupSet.getSurfaceForRupture(r, 1d); - OldCompoundSurface origSurf; - NewCompoundSurface newSurf; - if (surf instanceof OldCompoundSurface) { - origSurf = (OldCompoundSurface)surf; - newSurf = new NewCompoundSurface.Simple(origSurf.getSurfaceList(), rupSet.getFaultSectionDataForRupture(r)); - } else if (surf instanceof NewCompoundSurface) { - newSurf = (NewCompoundSurface)surf; - origSurf = new OldCompoundSurface(newSurf.getSurfaceList()); - } else { - throw new IllegalStateException("Unexpected surface type: "+surf.getClass()); - } + Preconditions.checkState(surf instanceof CompoundSurface); + + newSurfs[r] = (CompoundSurface)surf; + origSurfs[r] = new CompoundSurface.Simple(newSurfs[r].getSurfaceList()); // retains original ordering if (numDebug < maxNumDebug) { - if (!LocationUtils.areSimilar(origSurf.getFirstLocOnUpperEdge(), newSurf.getFirstLocOnUpperEdge()) - || !LocationUtils.areSimilar(origSurf.getLastLocOnUpperEdge(), newSurf.getLastLocOnUpperEdge()) - || !LocationUtils.areSimilar(origSurf.getFirstLocOnLowerEdge(), newSurf.getFirstLocOnLowerEdge()) - || !LocationUtils.areSimilar(origSurf.getLastLocOnLowerEdge(), newSurf.getLastLocOnLowerEdge())) { + if (!LocationUtils.areSimilar(origSurfs[r].getFirstLocOnUpperEdge(), newSurfs[r].getFirstLocOnUpperEdge()) + || !LocationUtils.areSimilar(origSurfs[r].getLastLocOnUpperEdge(), newSurfs[r].getLastLocOnUpperEdge()) + || !LocationUtils.areSimilar(origSurfs[r].getFirstLocOnLowerEdge(), newSurfs[r].getFirstLocOnLowerEdge()) + || 
!LocationUtils.areSimilar(origSurfs[r].getLastLocOnLowerEdge(), newSurfs[r].getLastLocOnLowerEdge())) { System.out.println("Ordering mismatch for "+r); - debugSurf(outputDir, rupSet, r, origSurf, newSurf); + debugSurf(outputDir, rupSet, r, origSurfs[r], newSurfs[r]); numDebug++; if (numDebug == maxNumDebug) { System.out.println("Bailing after "+numDebug+" debugs"); @@ -87,8 +79,6 @@ } } - origSurfs[r] = origSurf; - newSurfs[r] = newSurf; } System.out.println("DONE building"); @@ -123,7 +115,7 @@ } private static void debugSurf(File outputDir, FaultSystemRupSet rupSet, int rupIndex, - OldCompoundSurface origSurf, NewCompoundSurface newSurf) throws IOException { + CompoundSurface origSurf, CompoundSurface newSurf) throws IOException { List sects = rupSet.getFaultSectionDataForRupture(rupIndex); GeographicMapMaker mapMaker = new GeographicMapMaker(sects); mapMaker.setWriteGeoJSON(false); diff --git a/src/main/java/scratch/kevin/nshm23/SingleSiteHazardAndDataComparisonPageGen.java b/src/main/java/scratch/kevin/nshm23/SingleSiteHazardAndDataComparisonPageGen.java index 43f8003d..584759dd 100644 --- a/src/main/java/scratch/kevin/nshm23/SingleSiteHazardAndDataComparisonPageGen.java +++ b/src/main/java/scratch/kevin/nshm23/SingleSiteHazardAndDataComparisonPageGen.java @@ -64,7 +64,7 @@ import org.opensha.sha.earthquake.param.IncludeBackgroundParam; import org.opensha.sha.earthquake.param.ProbabilityModelOptions; import org.opensha.sha.earthquake.param.ProbabilityModelParam; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.gui.infoTools.IMT_Info; import org.opensha.sha.imr.AttenRelRef; @@ -957,8 +957,8 @@ public void run() { double moment = 
rate*MagUtils.magToMoment(mag); int fullIndexCount = 0; List fullIndexes = new ArrayList<>(); - if (surf instanceof OldCompoundSurface) { - List subSurfs = ((OldCompoundSurface)surf).getSurfaceList(); + if (surf instanceof CompoundSurface) { + List subSurfs = ((CompoundSurface)surf).getSurfaceList(); for (RuptureSurface subSurf : subSurfs) { int[] indexes; indexes = mappedIndexes.get(subSurf); @@ -1049,8 +1049,8 @@ static boolean canSkipNshmSurf(Region reg, RuptureSurface surf) { return false; } return !reg.contains(centroid); - } else if (surf instanceof OldCompoundSurface) { - for (RuptureSurface subSurf : ((OldCompoundSurface)surf).getSurfaceList()) { + } else if (surf instanceof CompoundSurface) { + for (RuptureSurface subSurf : ((CompoundSurface)surf).getSurfaceList()) { if (!canSkipNshmSurf(reg, subSurf)) // at least one is false, return false return false; diff --git a/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java b/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java index ef9ed1a4..c9736301 100644 --- a/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java +++ b/src/main/java/scratch/kevin/nshm23/figures/Regional_MFD_Plots.java @@ -61,7 +61,7 @@ import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.NSHM23_BaseRegion; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.SeismicityRegions; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.StitchedRegions; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.magdist.IncrementalMagFreqDist; import org.opensha.sha.util.TectonicRegionType; @@ -943,8 +943,8 @@ public List call() throws Exception { RuptureSurface surf = rup.getRuptureSurface(); int[] countsInside = new int[regions.length]; int count = 0; - if (surf instanceof OldCompoundSurface) { - 
for (RuptureSurface subSurf : ((OldCompoundSurface)surf).getSurfaceList()) { + if (surf instanceof CompoundSurface) { + for (RuptureSurface subSurf : ((CompoundSurface)surf).getSurfaceList()) { SurfInRegionsResult cached = subSurfInsideCache.get(subSurf); if (cached == null) { int subCount = 0; diff --git a/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java b/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java index 6b3a154c..e02440fc 100644 --- a/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java +++ b/src/main/java/scratch/kevin/simulators/erf/RSQSimSectBundledERF.java @@ -49,7 +49,7 @@ import org.opensha.sha.earthquake.ProbEqkSource; import org.opensha.sha.earthquake.faultSysSolution.RupSetDeformationModel; import org.opensha.sha.earthquake.faultSysSolution.RupSetFaultModel; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.simulators.EventRecord; @@ -491,7 +491,7 @@ private static RuptureSurface buildSubSectSurface(List sortedRupSe if (rupSurfs.size() == 1) surf = rupSurfs.get(0); else - surf = new OldCompoundSurface(rupSurfs); + surf = CompoundSurface.get(rupSurfs); return surf; } @@ -756,7 +756,7 @@ public synchronized boolean isWithinBuffer(Location loc, Collection par if (surfs.size() == 1) compound = surfs.get(0); else - compound = new OldCompoundSurface(surfs); + compound = CompoundSurface.get(surfs); LocationList trace = compound.getEvenlyDiscritizedUpperEdge(); // this can have duplicates in it, remove those for (int p=trace.size(); --p>0;) { diff --git a/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java b/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java index a06b5b0b..a92ccbb7 100644 --- a/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java +++ b/src/main/java/scratch/kevin/ucerf3/CatalogWriter.java @@ 
-15,7 +15,7 @@ import org.opensha.sha.earthquake.observedEarthquake.ObsEqkRupture; import org.opensha.sha.earthquake.observedEarthquake.parsers.UCERF3_CatalogParser; import org.opensha.sha.earthquake.observedEarthquake.parsers.ngaWest.NGAWestParser; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import org.opensha.sha.faultSurface.RuptureSurface; @@ -30,8 +30,8 @@ protected static void writeFiniteSurfaceFile(ObsEqkRupture rup, File file, doubl protected static void writeFiniteSurfaceFile(RuptureSurface rupSurf, File file, double distCutoff) throws IOException { ArrayList surfs = new ArrayList(); - if (rupSurf instanceof OldCompoundSurface) - surfs.addAll(((OldCompoundSurface)rupSurf).getSurfaceList()); + if (rupSurf instanceof CompoundSurface) + surfs.addAll(((CompoundSurface)rupSurf).getSurfaceList()); else surfs.add(rupSurf); diff --git a/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java b/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java index 13a2a85e..f3f0a439 100644 --- a/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java +++ b/src/main/java/scratch/kevin/ucerf3/CoulombFileParser.java @@ -15,7 +15,7 @@ import org.opensha.commons.util.FileUtils; import org.opensha.sha.earthquake.observedEarthquake.ObsEqkRupList; import org.opensha.sha.earthquake.observedEarthquake.ObsEqkRupture; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import org.opensha.sha.faultSurface.FaultTrace; import org.opensha.sha.faultSurface.RuptureSurface; @@ -116,7 +116,7 @@ public static RuptureSurface loadFaultSurface(File file, double gridSpacing) thr if (surfs.size() == 1) return surfs.get(0); - return new OldCompoundSurface(surfs); + return CompoundSurface.get(surfs); } private static Location getRelativeLocation(Location 
origin, double xOffset, double yOffset) { diff --git a/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java b/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java index bb1be992..57cae597 100644 --- a/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java +++ b/src/main/java/scratch/kevin/ucerf3/QuadToEvenlyGridded.java @@ -11,7 +11,7 @@ import org.opensha.commons.geo.LocationVector; import org.opensha.sha.earthquake.observedEarthquake.parsers.ngaWest.NGAWestParser; import org.opensha.sha.faultSurface.ApproxEvenlyGriddedSurface; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.EvenlyGriddedSurface; import org.opensha.sha.faultSurface.FaultTrace; import org.opensha.sha.faultSurface.GriddedSubsetSurface; @@ -200,7 +200,7 @@ public static void main(String[] args) throws IOException { } surfs.add(gridded); } else { - for (RuptureSurface rupSurf : ((OldCompoundSurface)surf).getSurfaceList()) { + for (RuptureSurface rupSurf : ((CompoundSurface)surf).getSurfaceList()) { EvenlyGriddedSurface gridded = (EvenlyGriddedSurface)rupSurf; if (gridded.getNumCols() < 2 || gridded.getNumRows() < 2) { System.out.println("Weird size: "+gridded.getNumRows()+"x"+gridded.getNumCols()); diff --git a/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java b/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java index 859d9b9a..43b68fad 100644 --- a/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java +++ b/src/main/java/scratch/kevin/ucerf3/UCERF3_IM_Calculator.java @@ -28,7 +28,7 @@ import org.opensha.sha.earthquake.faultSysSolution.modules.GridSourceProvider; import org.opensha.sha.earthquake.param.IncludeBackgroundOption; import org.opensha.sha.earthquake.param.IncludeBackgroundParam; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.FaultTrace; import 
org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.imr.AttenRelRef; @@ -324,10 +324,10 @@ private List pruneDuplicates(List locs) { } private RuptureSurface closestSubSurf(RuptureSurface surf, Location loc) { - if (surf instanceof OldCompoundSurface) { + if (surf instanceof CompoundSurface) { double minDist = Double.POSITIVE_INFINITY; RuptureSurface closest = null; - for (RuptureSurface subSurf : ((OldCompoundSurface)surf).getSurfaceList()) { + for (RuptureSurface subSurf : ((CompoundSurface)surf).getSurfaceList()) { double dist = subSurf.getDistanceRup(loc); if (dist < minDist) { minDist = dist; diff --git a/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java b/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java index 91250546..c9196f38 100644 --- a/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java +++ b/src/main/java/scratch/kevin/ucerf3/etas/MapStorageCalcs.java @@ -14,7 +14,7 @@ import org.opensha.commons.geo.Region; import org.opensha.refFaultParamDb.vo.FaultSectionPrefData; import org.opensha.sha.earthquake.faultSysSolution.FaultSystemRupSet; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; @@ -132,7 +132,7 @@ public static void main(String[] args) throws IOException, DocumentException { if (surfs.size() == 1) compound = surfs.get(0); else - compound = new OldCompoundSurface(surfs); + compound = CompoundSurface.get(surfs); LocationList trace = compound.getEvenlyDiscritizedUpperEdge(); // this can have duplicates in it, remove those for (int p=trace.size(); --p>0;) { diff --git a/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java b/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java index 24465920..4f43ce5a 100644 --- a/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java +++ 
b/src/main/java/scratch/kevin/ucerf3/inversion/ERFInsideSpeedTest.java @@ -13,7 +13,7 @@ import org.opensha.sha.earthquake.EqkRupture; import org.opensha.sha.earthquake.ProbEqkSource; import org.opensha.sha.earthquake.calc.ERF_Calculator; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.RupInRegionCache; import org.opensha.sha.faultSurface.RuptureSurface; @@ -56,7 +56,7 @@ public static void main(String[] args) throws ZipException, IOException { public boolean isRupInRegion(ERF erf, ProbEqkSource src, EqkRupture rup, int srcIndex, int rupIndex, Region region) { RuptureSurface surf = rup.getRuptureSurface(); - if (surf instanceof OldCompoundSurface) { + if (surf instanceof CompoundSurface) { ConcurrentMap regMap = map.get(region); if (regMap == null) { regMap = Maps.newConcurrentMap(); diff --git a/src/main/java/scratch/ned/nshm23/MiscPlots.java b/src/main/java/scratch/ned/nshm23/MiscPlots.java index c4a2eaa5..763475bb 100644 --- a/src/main/java/scratch/ned/nshm23/MiscPlots.java +++ b/src/main/java/scratch/ned/nshm23/MiscPlots.java @@ -34,7 +34,7 @@ import org.opensha.sha.earthquake.rupForecastImpl.nshm23.logicTree.NSHM23_ScalingRelationships_StableContinental; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.AnalysisRegions; import org.opensha.sha.earthquake.rupForecastImpl.nshm23.util.NSHM23_RegionLoader.SeismicityRegions; -import org.opensha.sha.faultSurface.OldCompoundSurface; +import org.opensha.sha.faultSurface.CompoundSurface; import org.opensha.sha.faultSurface.FaultSection; import org.opensha.sha.faultSurface.RuptureSurface; import org.opensha.sha.magdist.GutenbergRichterMagFreqDist; @@ -921,7 +921,7 @@ public static SummedMagFreqDist OLDgetMagFreqDistInRegion(NshmErf erf, Region re } } else { // it's a CompoundSurface - OldCompoundSurface compSurf = (OldCompoundSurface) surf; + CompoundSurface compSurf = (CompoundSurface) surf; 
List surfList = compSurf.getSurfaceList(); double ptRate = equivRate/surfList.size(); for(RuptureSurface surface: surfList) {