Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion core/src/commonMain/kotlin/com/sunya/cdm/api/Group.kt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ package com.sunya.cdm.api

import com.sunya.cdm.util.InternalLibraryApi
import com.sunya.cdm.util.makeValidCdmObjectName
import io.github.oshai.kotlinlogging.KotlinLogging

class Group(orgName : String,
val typedefs : List<Typedef>,
Expand Down Expand Up @@ -154,7 +155,7 @@ class Group(orgName : String,
// add if vb name not already added
fun addVariable(vb: Variable.Builder<*>) : Builder {
if (vb.datatype == Datatype.REFERENCE) {
println("skip REFERENCE variable $vb")
logger.warn{"skip REFERENCE variable $vb"}
return this
}
if (variables.find {it.name == vb.name } == null) {
Expand Down Expand Up @@ -234,4 +235,8 @@ class Group(orgName : String,
return Group(useName, typedefs, dimensions, attributes, variables, groups, parent)
}
}

companion object {
val logger = KotlinLogging.logger("Group")
}
}
30 changes: 15 additions & 15 deletions core/src/commonMain/kotlin/com/sunya/cdm/api/Netchdf.kt
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package com.sunya.cdm.api

import com.sunya.cdm.array.ArrayTyped
import com.sunya.cdm.iosp.ReadChunkConcurrent
import com.sunya.cdm.util.CdmFullNames

interface Netchdf : AutoCloseable {
Expand All @@ -16,22 +15,23 @@ interface Netchdf : AutoCloseable {
}

// TODO I think the output type is not always the input type
fun <T> readArrayData(v2: Variable<T>, section: SectionPartial? = null) : ArrayTyped<T>
fun <T> readArrayData(v2: Variable<T>, wantSection: SectionPartial? = null) : ArrayTyped<T>

// iterate over all the chunks in section, order is arbitrary.
fun <T> chunkIterator(v2: Variable<T>, section: SectionPartial? = null, maxElements : Int? = null) : Iterator<ArraySection<T>>
// iterate over all the chunks in section, order is arbitrary. TODO where is intersection with wantSection done ??
fun <T> chunkIterator(v2: Variable<T>, wantSection: SectionPartial? = null, maxElements : Int? = null) : Iterator<ArraySection<T>>

fun <T> readChunksConcurrent(v2: Variable<T>,
lamda : (ArraySection<*>) -> Unit,
done : () -> Unit,
nthreads: Int? = null) {
TODO()
}
}

// the section describes the array chunk relative to the variable's shape.
data class ArraySection<T>(val array : ArrayTyped<T>, val section : Section)

// Experimental: read chunks of data concurrently, calling back with a lambda; order is arbitrary.
fun <T> Netchdf.readChunksConcurrent(v2: Variable<T>,
section: SectionPartial? = null,
maxElements : Int? = null,
nthreads: Int = 20,
lamda : (ArraySection<T>) -> Unit) {
val reader = ReadChunkConcurrent()
val chunkIter = this.chunkIterator( v2, section, maxElements)
reader.readChunks(nthreads, chunkIter, lamda)
// Pairs one chunk of array data with the Section locating that chunk within the variable's shape.
data class ArraySection<T>(val array : ArrayTyped<T>, val section : Section) {
    // Intended to return only the portion of this chunk that overlaps wantSection.
    // NOTE(review): not implemented yet — currently returns the entire chunk unchanged (see TODO).
    fun intersect(wantSection: SectionPartial) : ArrayTyped<T> {
        // TODO
        return array
    }
}
12 changes: 12 additions & 0 deletions core/src/commonMain/kotlin/com/sunya/cdm/api/Section.kt
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,18 @@ data class Section(val ranges : List<LongProgression>, val varShape : LongArray)
result = 31 * result + varShape.contentHashCode()
return result
}

// Human-readable summary of this Section: its ranges (via List<LongProgression>.show()),
// derived shape, total element count, and the shape of the enclosing variable.
override fun toString(): String {
    return "Section(${ranges.show()}, shape=${shape.contentToString()}, totalElements=$totalElements, varShape=${varShape.contentToString()})"
}
}

/** Render each progression as "[first:last]", appending ":step" only when step != 1, with no separator. */
fun List<LongProgression>.show() = joinToString(separator = "") { prog ->
    if (prog.step == 1L) "[${prog.first}:${prog.last}]"
    else "[${prog.first}:${prog.last}:${prog.step}]"
}

/** A partially filled section of multidimensional array indices. */
Expand Down

This file was deleted.

4 changes: 2 additions & 2 deletions core/src/commonMain/kotlin/com/sunya/netchdf/hdf4/Hdf4File.kt
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@ class Hdf4File(val filename : String) : Netchdf {
override fun type() = header.type()
override val size : Long get() = raf.size()

override fun <T> readArrayData(v2: Variable<T>, section: SectionPartial?): ArrayTyped<T> {
override fun <T> readArrayData(v2: Variable<T>, wantSection: SectionPartial?): ArrayTyped<T> {
if (v2.nelems == 0L) {
return ArrayEmpty(v2.shape.toIntArray(), v2.datatype)
}
val filledSection = SectionPartial.fill(section, v2.shape)
val filledSection = SectionPartial.fill(wantSection, v2.shape)
return if (v2.datatype == Datatype.COMPOUND) {
readStructureDataArray(v2, filledSection) as ArrayTyped<T>
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,22 +7,16 @@ import com.sunya.cdm.layout.Tiling
import com.sunya.cdm.util.InternalLibraryApi
import kotlin.collections.mutableListOf

/** a BTree1 that uses OpenFileExtended */
internal class BTree1ext(
/** a BTree1 that uses OpenFileExtended and tracks its own tiling. */
internal class BTree1data(
val raf: OpenFileExtended,
val rootNodeAddress: Long,
val nodeType : Int, // 0 = group/symbol table, 1 = raw data chunks
varShape: LongArray,
chunkShape: LongArray,
) {
val tiling = Tiling(varShape, chunkShape)
val ndimStorage = chunkShape.size

init {
// println(" BTreeNode varShape ${varShape.contentToString()} chunkShape ${chunkShape.contentToString()}")
require (nodeType == 1)
}

fun rootNode(): BTreeNode = BTreeNode(rootNodeAddress, null)

// here both internal and leaf are the same structure
Expand All @@ -33,7 +27,6 @@ internal class BTree1ext(
private val leftAddress: Long
private val rightAddress: Long

// type 1
val keys = mutableListOf<LongArray>()
val values = mutableListOf<DataChunkIF>()
val children = mutableListOf<BTreeNode>()
Expand All @@ -44,26 +37,34 @@ internal class BTree1ext(
check(magic == "TREE") { "DataBTree doesnt start with TREE" }

val type: Int = raf.readByte(state).toInt()
check(type == nodeType) { "DataBTree must be type $nodeType" }
check(type == 1) { "DataBTree must be type 1" }

level = raf.readByte(state).toInt() // leaf nodes are level 0
nentries = raf.readShort(state).toInt() // number of children to which this node points
leftAddress = raf.readOffset(state)
rightAddress = raf.readOffset(state)

// println(" BTreeNode level $level nentries $nentries")

for (idx in 0 until nentries) {
if (nentries == 0) {
val chunkSize = raf.readInt(state)
val filterMask = raf.readInt(state)
val inner = LongArray(ndimStorage) { j -> raf.readLong(state) }
val key = DataChunkKey(chunkSize, filterMask, inner)
val childPointer = raf.readAddress(state) // 4 or 8 bytes, then add fileOffset
if (level == 0) {
keys.add(inner)
values.add(DataChunkEntry1(level, this, idx, key, childPointer))
} else {
children.add(BTreeNode(childPointer, this))
val childPointer = raf.readAddress(state)
keys.add(inner)
values.add(DataChunkEntry1(this, key, childPointer))
} else {
repeat(nentries) {
val chunkSize = raf.readInt(state)
val filterMask = raf.readInt(state)
val inner = LongArray(ndimStorage) { j -> raf.readLong(state) }
val key = DataChunkKey(chunkSize, filterMask, inner)
val childPointer = raf.readAddress(state) // 4 or 8 bytes, then add fileOffset
if (level == 0) {
keys.add(inner)
values.add(DataChunkEntry1( this, key, childPointer))
} else {
children.add(BTreeNode(childPointer, this))
}
}
}

Expand Down Expand Up @@ -100,15 +101,15 @@ internal class BTree1ext(
}

// childAddress = data chunk (level 1) else a child node
data class DataChunkEntry1(val level : Int, val parent : BTreeNode, val idx : Int, val key : DataChunkKey, val childAddress : Long) : DataChunkIF {
data class DataChunkEntry1(val parent : BTreeNode, val key : DataChunkKey, val childAddress : Long) : DataChunkIF {
override fun childAddress() = childAddress
override fun offsets() = key.offsets
override fun isMissing() = (childAddress == -1L)
override fun isMissing() = (childAddress <= 0L) // may be 0 or -1
override fun chunkSize() = key.chunkSize
override fun filterMask() = key.filterMask

override fun show(tiling : Tiling) : String = "chunkSize=${key.chunkSize}, chunkStart=${key.offsets.contentToString()}" +
", tile= ${tiling.tile(key.offsets).contentToString()} idx=$idx"
", tile= ${tiling.tile(key.offsets).contentToString()}"
}
}

Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package com.sunya.netchdf.hdf5


import com.sunya.cdm.api.computeSize
import com.sunya.cdm.api.toIntArray
import com.sunya.cdm.iosp.OpenFileIF
Expand All @@ -15,7 +14,7 @@ import kotlin.math.pow
@OptIn(InternalLibraryApi::class)

/* Btree version 2, for data. From jhdf. */
internal class BTree2j(private val h5: H5builder, owner: String, address: Long, storageDims: LongArray? = null) { // BTree2
internal class BTree2data(private val h5: H5builder, owner: String, address: Long, storageDims: LongArray? = null) { // BTree2
val btreeType: Int
private val nodeSize: Int // size in bytes of btree nodes
private val recordSize: Int // size in bytes of btree records
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@ import com.sunya.cdm.util.InternalLibraryApi
import io.github.oshai.kotlinlogging.KotlinLogging
import kotlin.math.ceil

private val logger = KotlinLogging.logger("ChunkedDataLayoutMessageV4")

// DataLayoutMessage version 4, layout class 2 (chunked), chunkIndexingType 1-5
// jhdf

Expand Down Expand Up @@ -213,6 +211,9 @@ internal class FixedArrayIndex(val h5: H5builder, val varShape: IntArray, val md

fun chunkIterator() : Iterator<ChunkImpl> = chunks.iterator()

companion object {
val logger = KotlinLogging.logger("ChunkedDataLayoutMessageV4")
}
}

/////////////////////////////////////////////////
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,12 +174,12 @@ internal class FractalHeap(private val h5: H5builder, forWho: String, address: L
when (subtype) {
1, 2 -> {
if (btreeHugeObjects == null) { // lazy
val local = BTree2j(h5, "FractalHeap btreeHugeObjects", btreeAddressHugeObjects)
val local = BTree2data(h5, "FractalHeap btreeHugeObjects", btreeAddressHugeObjects)
require(local.btreeType == subtype)
btreeHugeObjects = local.records
}

val record1: BTree2j.Record1? = BTree2j.findRecord1byId(btreeHugeObjects!!, offset)
val record1: BTree2data.Record1? = BTree2data.findRecord1byId(btreeHugeObjects!!, offset)
if (record1 == null) {
throw RuntimeException("Cant find DHeapId=$offset")
}
Expand Down Expand Up @@ -385,7 +385,7 @@ internal class FractalHeap(private val h5: H5builder, forWho: String, address: L
}

companion object {
private val logger = KotlinLogging.logger("H5builder")
private val logger = KotlinLogging.logger("FractalHeap")
var debugDetail = false
var debugFractalHeap = false
var debugPos = false
Expand Down
Loading