Skip to content

Commit 2926435

Browse files
authored
Merge pull request #227 from saalfeldlab/cloud-v2
Add support for writing to Google Cloud Storage
2 parents 211670f + 55de48c commit 2926435

7 files changed

Lines changed: 38 additions & 38 deletions

File tree

render-app/src/main/java/org/janelia/alignment/util/NeuroglancerAttributes.java

Lines changed: 14 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
import java.util.List;
1010
import java.util.Map;
1111

12-
import org.janelia.saalfeldlab.n5.N5FSWriter;
1312
import org.janelia.saalfeldlab.n5.N5Writer;
1413
import org.slf4j.Logger;
1514
import org.slf4j.LoggerFactory;
@@ -135,13 +134,13 @@ public NeuroglancerAttributes(final List<Double> stackResolutionValues,
135134
/**
136135
* Writes all attributes.json files required by neuroglancer to display the specified dataset.
137136
*
138-
* @param n5BasePath base path for n5.
137+
* @param n5Writer N5 writer to use for writing the attributes.
139138
* @param fullScaleDatasetPath path of the full scale data set.
140139
*
141140
* @throws IOException
142141
* if the writes fail for any reason.
143142
*/
144-
public void write(final Path n5BasePath,
143+
public void write(final N5Writer n5Writer,
145144
final Path fullScaleDatasetPath)
146145
throws IOException {
147146

@@ -150,10 +149,8 @@ public void write(final Path n5BasePath,
150149
final Path ngAttributesPath = isMultiScaleDataset ?
151150
fullScaleDatasetPath.getParent() : fullScaleDatasetPath;
152151

153-
LOG.info("write: entry, n5BasePath={}, fullScaleDatasetPath={}, ngAttributesPath={}",
154-
n5BasePath, fullScaleDatasetPath, ngAttributesPath);
155-
156-
final N5Writer n5Writer = new N5FSWriter(n5BasePath.toAbsolutePath().toString());
152+
LOG.info("write: entry, n5Base={}, fullScaleDatasetPath={}, ngAttributesPath={}",
153+
n5Writer.getURI(), fullScaleDatasetPath, ngAttributesPath);
157154

158155
// Neuroglancer recursively looks for attributes.json files from root path and stops at
159156
// the first subdirectory without an attributes.json file.
@@ -164,7 +161,7 @@ public void write(final Path n5BasePath,
164161
for (Path path = ngAttributesPath.getParent();
165162
(path != null) && (! path.endsWith("/"));
166163
path = path.getParent()) {
167-
LOG.info("write: saving supported attribute to {}{}/attributes.json", n5BasePath, path);
164+
LOG.info("write: saving supported attribute to {}{}/attributes.json", n5Writer.getURI(), path);
168165
n5Writer.setAttribute(path.toString(), SUPPORTED_KEY, true);
169166
}
170167

@@ -180,15 +177,15 @@ public void write(final Path n5BasePath,
180177
attributes.put("pixelResolution", pixelResolution);
181178
attributes.put("translate", translate);
182179

183-
LOG.info("write: saving neuroglancer attributes to {}{}/attributes.json", n5BasePath, ngAttributesPath);
180+
LOG.info("write: saving neuroglancer attributes to {}{}/attributes.json", n5Writer.getURI(), ngAttributesPath);
184181
n5Writer.setAttributes(ngAttributesPath.toString(), attributes);
185182

186183
if (isMultiScaleDataset) {
187184
for (int scaleLevel = 0; scaleLevel < scales.size(); scaleLevel++) {
188185
writeScaleLevelTransformAttributes(scaleLevel,
189186
scales.get(scaleLevel),
190187
n5Writer,
191-
n5BasePath,
188+
n5Writer.getURI().toString(),
192189
ngAttributesPath);
193190
}
194191
}
@@ -197,16 +194,18 @@ public void write(final Path n5BasePath,
197194
private void writeScaleLevelTransformAttributes(final int scaleLevel,
198195
final List<Integer> scaleLevelFactors,
199196
final N5Writer n5Writer,
200-
final Path n5BasePath,
197+
final String n5Base,
201198
final Path ngAttributesPath)
202199
throws IOException {
203200

204201
final String scaleName = "s" + scaleLevel;
205202
final Path scaleAttributesPath = Paths.get(ngAttributesPath.toString(), scaleName);
206203

207-
final Path scaleLevelDirectoryPath = Paths.get(n5BasePath.toString(), ngAttributesPath.toString(), scaleName);
208-
if (! scaleLevelDirectoryPath.toFile().exists()) {
209-
throw new IOException(scaleLevelDirectoryPath.toAbsolutePath() + " does not exist");
204+
if (n5Base.startsWith("/") || n5Base.startsWith("\\")) {
205+
final Path scaleLevelDirectoryPath = Paths.get(n5Base, ngAttributesPath.toString(), scaleName);
206+
if (! scaleLevelDirectoryPath.toFile().exists()) {
207+
throw new IOException(scaleLevelDirectoryPath.toAbsolutePath() + " does not exist");
208+
}
210209
}
211210

212211
final Map<String, Object> transformAttributes = new HashMap<>();
@@ -232,7 +231,7 @@ private void writeScaleLevelTransformAttributes(final int scaleLevel,
232231
final Map<String, Object> attributes = new HashMap<>();
233232
attributes.put("transform", transformAttributes);
234233

235-
LOG.info("writeScaleLevelTransformAttributes: saving {}{}/attributes.json", n5BasePath, scaleAttributesPath);
234+
LOG.info("writeScaleLevelTransformAttributes: saving {}{}/attributes.json", n5Base, scaleAttributesPath);
236235
n5Writer.setAttributes(scaleAttributesPath.toString(), attributes);
237236
}
238237

render-ws-java-client/src/main/java/org/janelia/render/client/n5/CrossCorrelationWithNextRegionalDataN5Writer.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ private static void createDataSet(final List<CrossCorrelationWithNextRegionalDat
132132
stackResolutionUnit,
133133
rowCount,
134134
columnCount);
135-
ngAttributes.write(Paths.get(basePath), Paths.get(datasetName));
135+
ngAttributes.write(n5Writer, Paths.get(datasetName));
136136

137137
final String dataSetPath = attributesPath.getParent().toString();
138138
final Path ccDataPath = Paths.get(dataSetPath, "cc_regional_data.json.gz");
@@ -161,7 +161,7 @@ private static void createDataSet(final List<CrossCorrelationWithNextRegionalDat
161161
Files.write(ngUrlPath, ngUrlString.getBytes());
162162
LOG.info("createDataSet: neuroglancer URL written to {}", ngUrlPath);
163163
} catch (final IOException e) {
164-
LOG.warn("ignoring failure to write " + ngUrlPath, e);
164+
LOG.warn("ignoring failure to write {}", ngUrlPath, e);
165165
}
166166
}
167167

@@ -327,7 +327,7 @@ private static String buildNgLayerStringForLatestRenderExport(final StackId stac
327327
exportPrefix, renderExportProjectDir);
328328
}
329329
} catch (final IOException e) {
330-
LOG.warn("ignoring failure to list files in " + renderExportProjectDir, e);
330+
LOG.warn("ignoring failure to list files in {}", renderExportProjectDir, e);
331331
}
332332
} else {
333333
LOG.warn("buildNgLayerStringForLatestRenderExport: failed to find render export directory {}",

render-ws-spark-client/src/main/java/org/janelia/render/client/spark/multisem/ExportMichalSegmentationsClient.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -242,6 +242,8 @@ private void runForStack(
242242
Arrays.asList(min[0], min[1], min[2]),
243243
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);
244244

245-
ngAttributes.write(Paths.get(parameters.targetN5Path), Paths.get(targetDataset, "s0"));
245+
try (final N5Writer n5 = new N5FSWriter(parameters.targetN5Path)) {
246+
ngAttributes.write(n5, Paths.get(targetDataset, "s0"));
247+
}
246248
}
247249
}

render-ws-spark-client/src/main/java/org/janelia/render/client/spark/n5/H5TileToN5PreviewClient.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -559,8 +559,7 @@ private void exportPreview(final JavaSparkContext sparkContext,
559559
Arrays.asList(exportInfo.min[0], exportInfo.min[1], exportInfo.min[2]),
560560
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);
561561

562-
ngAttributes.write(Paths.get(exportInfo.n5PathString),
563-
Paths.get(exportInfo.fullScaleDatasetName));
562+
ngAttributes.write(n5Supplier.get(), Paths.get(exportInfo.fullScaleDatasetName));
564563
}
565564

566565
private static long findMinZToRender(final DatasetAttributes datasetAttributes,

render-ws-spark-client/src/main/java/org/janelia/render/client/spark/n5/N5Client.java

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -38,10 +38,11 @@
3838
import org.janelia.render.client.zspacing.ThicknessCorrectionData;
3939
import org.janelia.saalfeldlab.n5.DataType;
4040
import org.janelia.saalfeldlab.n5.GzipCompression;
41-
import org.janelia.saalfeldlab.n5.N5FSWriter;
4241
import org.janelia.saalfeldlab.n5.N5Writer;
4342
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
4443
import org.janelia.saalfeldlab.n5.spark.supplier.N5WriterSupplier;
44+
import org.janelia.saalfeldlab.n5.universe.N5Factory;
45+
import org.janelia.saalfeldlab.n5.universe.N5Factory.StorageFormat;
4546
import org.slf4j.Logger;
4647
import org.slf4j.LoggerFactory;
4748

@@ -335,13 +336,12 @@ public void run()
335336
}
336337

337338
int numberOfDownSampledDatasets = 0;
339+
final N5WriterSupplier n5Supplier = new Util.N5PathSupplier(parameters.n5Path);
338340
if (downsampleStack) {
339341

340342
LOG.info("run: downsample stack with factors {}", Arrays.toString(downsampleFactors));
341343

342344
// Now that the full resolution image is saved into n5, generate the scale pyramid
343-
final N5WriterSupplier n5Supplier = new Util.N5PathSupplier(parameters.n5Path);
344-
345345
final List<String> downsampledDatasetPaths =
346346
downsampleScalePyramid(sparkContext,
347347
n5Supplier,
@@ -361,8 +361,7 @@ public void run()
361361
Arrays.asList(min[0], min[1], min[2]),
362362
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);
363363

364-
ngAttributes.write(Paths.get(parameters.n5Path),
365-
Paths.get(fullScaleDatasetName));
364+
ngAttributes.write(n5Supplier.get(), Paths.get(fullScaleDatasetName));
366365

367366
if (downsampleStackForReview) {
368367

@@ -406,8 +405,7 @@ public void run()
406405
Arrays.asList(min[0], min[1], min[2]),
407406
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);
408407

409-
reviewNgAttributes.write(Paths.get(parameters.n5Path),
410-
Paths.get(fullScaleReviewDatasetName));
408+
reviewNgAttributes.write(n5ReviewSupplier.get(), Paths.get(fullScaleReviewDatasetName));
411409
}
412410

413411
sparkContext.close();
@@ -475,7 +473,7 @@ public static void setupFullScaleExportN5(final Parameters parameters,
475473
final int[] blockSize,
476474
final DataType dataType) {
477475

478-
try (final N5Writer n5 = new N5FSWriter(parameters.n5Path)) {
476+
try (final N5Writer n5 = new N5Factory().openWriter(StorageFormat.N5, parameters.n5Path)) {
479477
n5.createDataset(fullScaleDatasetName,
480478
dimensions,
481479
blockSize,
@@ -494,7 +492,7 @@ public static void updateFullScaleExportAttributes(final Parameters parameters,
494492

495493
String exportAttributesDatasetName = fullScaleDatasetName;
496494

497-
try (final N5Writer n5 = new N5FSWriter(parameters.n5Path)) {
495+
try (final N5Writer n5 = new N5Factory().openWriter(StorageFormat.N5, parameters.n5Path)) {
498496
final Map<String, Object> export_attributes = new HashMap<>();
499497
export_attributes.put("runTimestamp", new Date());
500498
export_attributes.put("runParameters", parameters);
@@ -661,8 +659,9 @@ private static void saveRenderStack(final JavaSparkContext sc,
661659
}
662660
}
663661

664-
final N5Writer anotherN5Writer = new N5FSWriter(n5Path); // needed to prevent Spark serialization error
662+
final N5Writer anotherN5Writer = new N5Factory().openWriter(StorageFormat.N5, n5Path); // needed to prevent Spark serialization error
665663
N5Utils.saveNonEmptyBlock(block, anotherN5Writer, datasetName, gridBlock.gridPosition, new UnsignedByteType(0));
664+
anotherN5Writer.close();
666665
});
667666
}
668667

@@ -722,8 +721,9 @@ private static void save2DRenderStack(final JavaSparkContext sc,
722721
out.next().set(in.next());
723722
}
724723

725-
final N5Writer anotherN5Writer = new N5FSWriter(n5Path); // needed to prevent Spark serialization error
724+
final N5Writer anotherN5Writer = new N5Factory().openWriter(StorageFormat.N5, n5Path); // needed to prevent Spark serialization error
726725
N5Utils.saveNonEmptyBlock(block, anotherN5Writer, datasetName, gridBlock.gridPosition, new UnsignedByteType(0));
726+
anotherN5Writer.close();
727727
});
728728

729729
LOG.info("save2DRenderStack: exit");

render-ws-spark-client/src/main/java/org/janelia/render/client/spark/n5/Util.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,9 @@
22

33
import java.io.IOException;
44

5-
import org.janelia.saalfeldlab.n5.N5FSWriter;
65
import org.janelia.saalfeldlab.n5.N5Writer;
76
import org.janelia.saalfeldlab.n5.spark.supplier.N5WriterSupplier;
7+
import org.janelia.saalfeldlab.n5.universe.N5Factory;
88

99
/**
1010
* Utilities for N5 operations.
@@ -22,7 +22,7 @@ public N5PathSupplier(final String path) {
2222
@Override
2323
public N5Writer get()
2424
throws IOException {
25-
return new N5FSWriter(path);
25+
return new N5Factory().openWriter(N5Factory.StorageFormat.N5, path);
2626
}
2727
}
2828

render-ws-spark-client/src/test/java/org/janelia/render/client/spark/n5/N5ClientTest.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -150,21 +150,21 @@ public void testSetupFullScaleExportN5() throws Exception {
150150
@Test
151151
public void testNeuroglancerAttributes() throws Exception {
152152

153-
final Path n5Path = n5PathDirectory.toPath().toAbsolutePath();
153+
final String n5Path = n5PathDirectory.getAbsolutePath();
154154
final Path fullScaleDatasetPath = Paths.get("/render/test_stack/one_more_nested_dir/s0");
155155
final String datasetName = fullScaleDatasetPath.toString();
156156

157157
final long[] dimensions = { 100L, 200L, 300L };
158158
final int[] blockSize = { 10, 20, 30 };
159-
try (final N5Writer n5Writer = new N5FSWriter(n5Path.toString())) {
159+
try (final N5Writer n5Writer = new N5FSWriter(n5Path)) {
160160

161161
final DatasetAttributes datasetAttributes = new DatasetAttributes(dimensions,
162162
blockSize,
163163
DataType.UINT8,
164164
new GzipCompression());
165165
n5Writer.createDataset(datasetName, datasetAttributes);
166166

167-
final N5Reader n5Reader = new N5FSReader(n5Path.toString());
167+
final N5Reader n5Reader = new N5FSReader(n5Path);
168168
Assert.assertTrue("dataset " + datasetName + " is missing", n5Reader.datasetExists(datasetName));
169169

170170
final Map<String, Object> originalDatasetAttributes = datasetAttributes.asMap();
@@ -198,7 +198,7 @@ public void testNeuroglancerAttributes() throws Exception {
198198
Arrays.asList(5L, 25L, 125L),
199199
NeuroglancerAttributes.NumpyContiguousOrdering.C);
200200

201-
ngAttributes.write(n5Path, fullScaleDatasetPath);
201+
ngAttributes.write(n5Writer, fullScaleDatasetPath);
202202

203203
final String testStackDatasetName = fullScaleDatasetPath.getParent().toString();
204204

0 commit comments

Comments
 (0)