81 changes: 41 additions & 40 deletions Sources/VimKit/Database+Importer.swift
@@ -53,7 +53,7 @@ extension Database {
if shouldImport {
ImportTaskTracker.shared.tasks[sha256Hash] = true
let importer = Database.ImportActor(self)
await importer.import(limit)
await importer.import()
}
}

@@ -95,10 +95,8 @@ extension Database {
}.store(in: &subscribers)
}


/// Starts the import process.
/// - Parameter limit: the max limit of models per entity to import
func `import`(_ limit: Int = .max) {
func `import`() {

let group = DispatchGroup()
let start = Date.now
@@ -122,7 +120,7 @@ extension Database {
debugPrint("􁃎 [\(modelName)] - skipping cache warming")
return
}
warmCache(modelType, limit)
warmCache(modelType)
}
}

@@ -148,7 +146,7 @@ extension Database {
debugPrint("􁃎 [\(modelName)] - skipping import")
return
}
importModel(modelType, limit)
importModel(modelType)
}
}

@@ -177,36 +175,35 @@ extension Database {
/// Warms the cache for the specified model type.
/// - Parameters:
/// - modelType: the type of model
/// - limit: the cache size limit
private func warmCache(_ modelType: any IndexedPersistentModel.Type, _ limit: Int) {
private func warmCache(_ modelType: any IndexedPersistentModel.Type) {
guard let table = database.tables[modelType.modelName] else { return }
let count = min(table.rows.count, limit)
_ = modelType.warm(size: count, cache: cache)
modelType.warm(table: table, cache: cache)
}

/// Imports models of the specified type into the model container.
/// - Parameters:
/// - modelType: the model type
/// - limit: the max limit of models to import
private func importModel(_ modelType: any IndexedPersistentModel.Type, _ limit: Int) {
private func importModel(_ modelType: any IndexedPersistentModel.Type) {
let modelName = modelType.modelName
guard let table = database.tables[modelName] else { return }
guard let modelCache = cache.caches[modelName] else { return }

let keys = modelCache.keys
let start = Date.now
let rowCount = table.rows.count
let rowCount = modelCache.keys.count//table.rows.count
var state: ModelMetadata.State = .unknown

defer {
let timeInterval = abs(start.timeIntervalSinceNow)
debugPrint("􂂼 [\(modelName)] - [\(state)] [\(rowCount)] in [\(timeInterval.stringFromTimeInterval())]")
updateMeta(modelName, state: state)
}

debugPrint("􀈄 [\(modelType.modelName)] - importing [\(rowCount)] models")
for i in 0..<rowCount {
if i >= limit || Task.isCancelled { break }
let index = Int64(i)
let row = table.rows[i]
let timeInterval = abs(start.timeIntervalSinceNow)
debugPrint("􂂼 [\(modelName)] - [\(state)] [\(rowCount)] in [\(timeInterval.stringFromTimeInterval())]")
updateMeta(modelName, state: state)
}

debugPrint("􀈄 [\(modelName)] - importing [\(rowCount)] models")

for index in keys {
if Task.isCancelled { break }
let row = table.rows[Int(index)]
update(index: index, modelType, data: row)
count += 1
}
@@ -232,21 +229,21 @@ extension Database {
try? modelContext.transaction {
for cacheKey in cacheKeys {

guard let cache = cache.caches[cacheKey] else { continue }
guard let modelCache = cache.caches[cacheKey] else { continue }
let start = Date.now
let keys = cache.keys
let keys = modelCache.keys

defer {
let timeInterval = abs(start.timeIntervalSinceNow)
debugPrint("􂂼 [Batch] - inserted [\(cacheKey)] [\(keys.count)] in [\(timeInterval.stringFromTimeInterval())]")
cache.empty()
modelCache.empty()
}

for key in keys {
guard let model = cache[key] else { continue }
guard let model = modelCache[key] else { continue }
modelContext.insert(model)
batchCount += 1
cache.removeValue(for: key)
modelCache.removeValue(for: key)
}
}
}
@@ -326,13 +323,13 @@ extension Database {
init() {}

/// Warms the cache to a specific size
/// - Parameter size: the size of the cache
/// - Parameter table: the database table data
/// - Returns: a list of models that have been cached.
@discardableResult
func warm<T>(_ size: Int) -> [T] where T: IndexedPersistentModel {
func warm<T>(_ table: Database.Table) -> [T] where T: IndexedPersistentModel {
let cacheKey: CacheKey = T.modelName
let cache = findOrCreateCache(cacheKey)
return cache.warm(size)
return cache.warm(table)
}

/// Finds or creates a model with the specified index and type.
@@ -368,7 +365,7 @@ extension Database {
fileprivate final class ModelCache: @unchecked Sendable {

/// The backing storage cache.
private lazy var cache: Cache<Int64, any IndexedPersistentModel> = {
fileprivate lazy var cache: Cache<Int64, any IndexedPersistentModel> = {
let cache = Cache<Int64, any IndexedPersistentModel>()
cache.totalCostLimit = cacheTotalCostLimit
cache.evictsObjectsWithDiscardedContent = true
@@ -383,31 +380,35 @@ extension Database {
/// Initializer.
init() { }

/// Warms the cache up to the specified index size. Any entities that
/// have cache index misses are stubbed out skeletons that can later be filled in with `.update(data:cache:)`.
/// Warms the cache for the specified table. The entities are stubbed out skeletons that can later be filled in with `.update(data:cache:)`.
/// Please note that the models that are inserted into the cache are not inserted into the model context. As an import optimization,
/// all of the models are inserted via the `.batchInsert()` method.
/// - Parameter size: the upper bounds of the model index size
/// - Parameter table: the database table to cache from
/// - Returns: empty results for now, simply used to infer type from the generic - could be reworked
@discardableResult
func warm<T>(_ size: Int) -> [T] where T: IndexedPersistentModel {
func warm<T>(_ table: Database.Table) -> [T] where T: IndexedPersistentModel {
let cacheKey: CacheKey = T.modelName
let size = table.rows.count
if size <= .zero {
debugPrint("􂂼 [\(cacheKey)] - skipping warm - [\(models.count)] [\(size)]")
return []
}
debugPrint("􁰹 [\(cacheKey)] - warming cache [\(size)]")

var count = 0
let start = Date.now
defer {
let timeInterval = abs(start.timeIntervalSinceNow)
debugPrint("􂂼 [\(cacheKey)] - cache created [\(count)] in [\(timeInterval.stringFromTimeInterval())]")
}

let range: Range<Int64> = 0..<Int64(size)
for index in range {
for i in 0..<size {
let index = Int64(i)
let model: T = .init()
model.index = index
cache[index] = model
count += 1
}
let timeInterval = abs(start.timeIntervalSinceNow)
debugPrint("􂂼 [\(cacheKey)] - cache created [\(size)] in [\(timeInterval.stringFromTimeInterval())]")
return []
}
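For reference, a minimal self-contained sketch of the warm-then-fill pattern described in the comment above: skeleton models are allocated per table row during the warm pass, updated from raw row data afterwards, and only then batch-inserted. The Skeleton type and the dictionary-backed cache here are illustrative stand-ins, not VimKit's IndexedPersistentModel, Database.Table, or ModelCache.

// Sketch only: stand-in types, not the VimKit types used in this PR.
final class Skeleton {
    var index: Int64 = 0
    var name: String = ""
}

// Warm: allocate an empty skeleton for every row, keyed by its index.
func warm(rowCount: Int) -> [Int64: Skeleton] {
    var cache: [Int64: Skeleton] = [:]
    for i in 0..<rowCount {
        let model = Skeleton()
        model.index = Int64(i)
        cache[Int64(i)] = model
    }
    return cache
}

// Fill: later, update each skeleton in place from its raw row data.
func fill(_ cache: [Int64: Skeleton], rows: [[String: String]]) {
    for (index, model) in cache {
        model.name = rows[Int(index)]["Name"] ?? ""
    }
}

// Usage: warm first, fill later, then batch-insert the cached models in one transaction.
let rows = [["Name": "Wall"], ["Name": "Door"]]
let skeletons = warm(rowCount: rows.count)
fill(skeletons, rows: rows)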

34 changes: 9 additions & 25 deletions Sources/VimKit/Database+Models.swift
@@ -21,7 +21,7 @@ public enum ModelImportPriority: Int, Sendable {
case normal
/// The model type has a high priority during the import process.
case high
/// The model type has a very hight priority during the import process.
/// The model type has a very high priority during the import process.
case veryHigh
}

@@ -66,11 +66,11 @@ extension IndexedPersistentModel {

/// Warms the cache with the specified size.
/// - Parameters:
/// - size: the size of the cache
/// - table: the database table that contains the raw data
/// - cache: the cache to warm
/// - Returns: the cached objects.
static func warm(size: Int, cache: Database.ImportCache) -> [Self] {
cache.warm(size)
static func warm(table: Database.Table, cache: Database.ImportCache) -> [Self] {
cache.warm(table)
}

/// Performs a fetch request for all models in the specified context.
@@ -539,7 +539,6 @@ extension Database {
public var room: Room?
public var group: Group?
public var workset: Workset?
public var parameters: [Parameter]

/// Returns the elements instance type
public var instanceType: Element? {
@@ -552,25 +551,10 @@ extension Database {
return results[0]
}

/// Returns a hash of instance parameters grouped by name
public var instanceParameters: [String: [Parameter]] {
var groups = [String: [Parameter]]()
for parameter in parameters {
guard let descriptor = parameter.descriptor else { continue }
if groups[descriptor.group] != nil {
groups[descriptor.group]?.append(parameter)
} else {
groups[descriptor.group] = [parameter]
}
}
return groups
}

/// Initializer.
public required init() {
index = .empty
elementId = .empty
parameters = []
}

public func update(from data: [String: AnyHashable], cache: ImportCache) {
@@ -890,35 +874,35 @@ extension Database {
}

@Transient
public static let importPriority: ModelImportPriority = .normal
public static let importPriority: ModelImportPriority = .veryHigh

@Attribute(.unique)
public var index: Int64
public var value: String
public var descriptor: ParameterDescriptor?
public var element: Int64

/// Provides a convenience formatted value if the value is pipe delimited.
@Transient
public var formattedValue: String {
value.contains("|") ? String(value.split(separator: "|").last!) : value
}


/// Initializer.
public required init() {
index = .empty
value = .empty
element = .empty
}

public func update(from data: [String: AnyHashable], cache: ImportCache) {
if let idx = data["ParameterDescriptor"] as? Int64, idx != .empty {
descriptor = cache.findOrCreate(idx)
}
value = data["Value"] as? String ?? .empty
if let idx = data["Element"] as? Int64, idx != .empty {
let element: Element = cache.findOrCreate(idx)
element.parameters.append(self)
element = idx
}
value = data["Value"] as? String ?? .empty
}
}
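Because the Parameter-to-Element link is now stored as a plain Int64 index instead of appending to an Element.parameters array, an element's parameters would be looked up by predicate. The following is a hedged sketch only, assuming SwiftData and the Database.Parameter model shown above; the helper name, the context argument, and the imports are illustrative and not part of this PR.

import SwiftData
import VimKit

// Fetch all parameters whose `element` field matches the given element index.
func parameters(for elementIndex: Int64, in context: ModelContext) throws -> [Database.Parameter] {
    let descriptor = FetchDescriptor<Database.Parameter>(
        predicate: #Predicate<Database.Parameter> { $0.element == elementIndex }
    )
    return try context.fetch(descriptor)
}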

13 changes: 13 additions & 0 deletions Sources/VimKit/Extensions/NSCache+Extensions.swift
@@ -71,6 +71,19 @@ public final class Cache<Key: Hashable, Value>: @unchecked Sendable {
return storage.object(forKey: WrappedKey(key))?.value
}

/// Returns a list of values for the given set of keys
/// - Parameter keys: the set of keys
/// - Returns: a list of values for the given keys
public func values(in keys: Set<Key>) -> [Value] {
lock.lock()
defer { lock.unlock() }
var values: [Value?] = []
for key in keys {
values.append(storage.object(forKey: WrappedKey(key))?.value)
}
return values.compactMap{ $0 }
}

/// Removes the value of the specified key in the cache.
/// - Parameter key: the key to remove
public func removeValue(for key: Key) {
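A quick usage sketch for the new values(in:) helper, assuming the Cache subscript setter used elsewhere in this PR; keys with no cached value are dropped by the compactMap.

let cache = Cache<Int64, String>()
cache[1] = "Wall"
cache[2] = "Door"
let found = cache.values(in: [1, 2, 99])
// found contains "Wall" and "Door" (order is unspecified); key 99 has no cached value and is skipped.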