use new for loops over iterators (#95)

* use new for loops over iterators

* run github actions on every push
wch-ch32v003
Matt Knight authored 2 years ago, committed by Matt Knight
parent 137b50484a
commit c48ca0e86b
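
The change below is mechanical: wherever the code walked a std.ArrayHashMap with an explicit iterator, it now uses the multi-object `for` added in Zig 0.11 over the map's backing `.keys()`/`.values()` slices. A minimal standalone sketch of the two styles (the map type and entries here are made up for illustration, not taken from the repo):

const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();

    // hypothetical stand-in for the EntityId maps used in regz
    var map = std.AutoArrayHashMap(u32, u32).init(gpa.allocator());
    defer map.deinit();
    try map.put(1, 10);
    try map.put(2, 20);

    // old style: explicit iterator, dereferencing key_ptr/value_ptr
    var it = map.iterator();
    while (it.next()) |entry| {
        std.debug.print("{} -> {}\n", .{ entry.key_ptr.*, entry.value_ptr.* });
    }

    // new style: multi-object for over the backing key/value slices
    for (map.keys(), map.values()) |key, value| {
        std.debug.print("{} -> {}\n", .{ key, value });
    }
}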

@@ -1,9 +1,6 @@
 name: ci
 on:
   push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
   schedule:
     - cron: "0 7 * * *"

@@ -349,12 +349,9 @@ fn remove_children(db: *Database, id: EntityId) void {
             var children_set = children_entry.value;
             defer children_set.deinit(db.gpa);

-            var it = children_set.iterator();
-            while (it.next()) |child_entry| {
-                const child_id = child_entry.key_ptr.*;
+            for (children_set.keys()) |child_id|
                 // this will get rid of the parent attr
                 db.destroy_entity(child_id);
-            }
         }
     }
 }
@@ -805,13 +802,11 @@ pub fn get_entity_id_by_name(
     comptime var group = (tok_it.next() orelse unreachable) ++ "s";
     comptime var table = (tok_it.next() orelse unreachable) ++ "s";

-    var it = @field(@field(db, group), table).iterator();
-    return while (it.next()) |entry| {
-        const entry_id = entry.key_ptr.*;
-        const entry_name = db.attrs.name.get(entry_id) orelse continue;
+    return for (@field(@field(db, group), table).keys()) |id| {
+        const entry_name = db.attrs.name.get(id) orelse continue;
         if (std.mem.eql(u8, name, entry_name)) {
-            assert(db.entity_is(entity_location, entry_id));
-            return entry_id;
+            assert(db.entity_is(entity_location, id));
+            return id;
         }
     } else error.NameNotFound;
 }

@@ -174,10 +174,7 @@ fn infer_peripheral_offsets(ctx: *Context) !void {
     var type_counts = std.AutoArrayHashMap(EntityId, struct { count: usize, instance_id: EntityId }).init(db.gpa);
     defer type_counts.deinit();

-    var instance_it = db.instances.peripherals.iterator();
-    while (instance_it.next()) |instance_entry| {
-        const instance_id = instance_entry.key_ptr.*;
-        const type_id = instance_entry.value_ptr.*;
+    for (db.instances.peripherals.keys(), db.instances.peripherals.values()) |instance_id, type_id| {
         if (type_counts.getEntry(type_id)) |entry|
             entry.value_ptr.count += 1
         else
@@ -187,13 +184,13 @@ fn infer_peripheral_offsets(ctx: *Context) !void {
         });
     }

-    var type_it = type_counts.iterator();
-    while (type_it.next()) |type_entry| if (type_entry.value_ptr.count == 1) {
-        const type_id = type_entry.key_ptr.*;
-        const instance_id = type_entry.value_ptr.instance_id;
-        infer_peripheral_offset(ctx, type_id, instance_id) catch |err|
+    for (type_counts.keys(), type_counts.values()) |type_id, result| {
+        if (result.count != 1)
+            continue;
+
+        infer_peripheral_offset(ctx, type_id, result.instance_id) catch |err|
             log.warn("failed to infer peripheral instance offset: {}", .{err});
-    };
+    }
 }

 fn infer_peripheral_offset(ctx: *Context, type_id: EntityId, instance_id: EntityId) !void {
@@ -203,9 +200,7 @@ fn infer_peripheral_offset(ctx: *Context, type_id: EntityId, instance_id: Entity
     var min_offset: ?u64 = null;

    // first find the min offset of all the registers for this peripheral
     const register_set = db.children.registers.get(type_id) orelse return;
-    var register_it = register_set.iterator();
-    while (register_it.next()) |register_entry| {
-        const register_id = register_entry.key_ptr.*;
+    for (register_set.keys()) |register_id| {
         const offset = db.attrs.offset.get(register_id) orelse continue;
         if (min_offset == null)
@@ -220,9 +215,7 @@ fn infer_peripheral_offset(ctx: *Context, type_id: EntityId, instance_id: Entity
     const instance_offset: u64 = db.attrs.offset.get(instance_id) orelse 0;
     try db.attrs.offset.put(db.gpa, instance_id, instance_offset + min_offset.?);

-    register_it = register_set.iterator();
-    while (register_it.next()) |register_entry| {
-        const register_id = register_entry.key_ptr.*;
+    for (register_set.keys()) |register_id| {
         if (db.attrs.offset.getEntry(register_id)) |offset_entry|
             offset_entry.value_ptr.* -= min_offset.?;
     }
@@ -232,9 +225,7 @@ fn infer_peripheral_offset(ctx: *Context, type_id: EntityId, instance_id: Entity
 // it, and determine the size of the enum
 fn infer_enum_sizes(ctx: *Context) !void {
     const db = ctx.db;
-    var enum_it = db.types.enums.iterator();
-    while (enum_it.next()) |entry| {
-        const enum_id = entry.key_ptr.*;
+    for (db.types.enums.keys()) |enum_id| {
         infer_enum_size(db, enum_id) catch |err| {
             log.warn("failed to infer size of enum '{s}': {}", .{
                 db.attrs.name.get(enum_id) orelse "<unknown>",
@@ -248,9 +239,7 @@ fn infer_enum_size(db: *Database, enum_id: EntityId) !void {
     const max_value = blk: {
         const enum_fields = db.children.enum_fields.get(enum_id) orelse return error.MissingEnumFields;
         var ret: u32 = 0;
-        var it = enum_fields.iterator();
-        while (it.next()) |entry| {
-            const enum_field_id = entry.key_ptr.*;
+        for (enum_fields.keys()) |enum_field_id| {
             const value = db.types.enum_fields.get(enum_field_id).?;
             ret = std.math.max(ret, value);
         }
@@ -261,10 +250,7 @@ fn infer_enum_size(db: *Database, enum_id: EntityId) !void {
     var field_sizes = std.ArrayList(u64).init(db.gpa);
     defer field_sizes.deinit();

-    var it = db.attrs.@"enum".iterator();
-    while (it.next()) |entry| {
-        const field_id = entry.key_ptr.*;
-        const other_enum_id = entry.value_ptr.*;
+    for (db.attrs.@"enum".keys(), db.attrs.@"enum".values()) |field_id, other_enum_id| {
         assert(db.entity_is("type.field", field_id));
         if (other_enum_id != enum_id)
             continue;
@@ -509,9 +495,7 @@ fn assign_modes_to_entity(
     var tok_it = std.mem.tokenize(u8, mode_names, " ");
     while (tok_it.next()) |mode_str| {
-        var it = mode_set.iterator();
-        while (it.next()) |mode_entry| {
-            const mode_id = mode_entry.key_ptr.*;
+        for (mode_set.keys()) |mode_id| {
             if (db.attrs.name.get(mode_id)) |name|
                 if (std.mem.eql(u8, name, mode_str)) {
                     const result = try db.attrs.modes.getOrPut(db.gpa, id);
@@ -728,9 +712,7 @@ fn load_field(ctx: *Context, node: xml.Node, register_id: EntityId) !void {
     // values _should_ match to a known enum
     // TODO: namespace the enum to the appropriate register, register_group, or peripheral
     if (node.get_attribute("values")) |values| {
-        var it = db.types.enums.iterator();
-        while (it.next()) |entry| {
-            const enum_id = entry.key_ptr.*;
+        for (db.types.enums.keys()) |enum_id| {
             const enum_name = db.attrs.name.get(enum_id) orelse continue;
             if (std.mem.eql(u8, enum_name, values)) {
                 log.debug("{}: assigned enum '{s}'", .{ id, enum_name });
@@ -832,11 +814,10 @@ fn load_module_instances(
     const db = ctx.db;
     const module_name = node.get_attribute("name") orelse return error.MissingModuleName;
     const type_id = blk: {
-        var periph_it = db.types.peripherals.iterator();
-        while (periph_it.next()) |entry| {
-            if (db.attrs.name.get(entry.key_ptr.*)) |entry_name|
-                if (std.mem.eql(u8, entry_name, module_name))
-                    break :blk entry.key_ptr.*;
+        for (db.types.peripherals.keys()) |peripheral_id| {
+            if (db.attrs.name.get(peripheral_id)) |peripheral_name|
+                if (std.mem.eql(u8, peripheral_name, module_name))
+                    break :blk peripheral_id;
         } else {
             log.warn("failed to find the '{s}' peripheral type", .{
                 module_name,
@@ -939,9 +920,7 @@ fn load_module_instance_from_register_group(
     const name_in_module = register_group_node.get_attribute("name-in-module") orelse return error.MissingNameInModule;
     const register_group_id = blk: {
         const register_group_set = db.children.register_groups.get(peripheral_type_id) orelse return error.MissingRegisterGroup;
-        var it = register_group_set.iterator();
-        break :blk while (it.next()) |entry| {
-            const register_group_id = entry.key_ptr.*;
+        break :blk for (register_group_set.keys()) |register_group_id| {
             const register_group_name = db.attrs.name.get(register_group_id) orelse continue;
             if (std.mem.eql(u8, name_in_module, register_group_name))
                 break register_group_id;

@@ -63,9 +63,7 @@ fn write_devices(db: Database, writer: anytype) !void {
     );

     // TODO: order devices alphabetically
-    var it = db.instances.devices.iterator();
-    while (it.next()) |entry| {
-        const device_id = entry.key_ptr.*;
+    for (db.instances.devices.keys()) |device_id| {
         write_device(db, device_id, writer) catch |err| {
             log.warn("failed to write device: {}", .{err});
         };
@@ -149,9 +147,7 @@ fn write_device(db: Database, device_id: EntityId, out_writer: anytype) !void {
     var list = std.ArrayList(EntityWithOffset).init(db.gpa);
     defer list.deinit();

-    var it = peripheral_set.iterator();
-    while (it.next()) |entry| {
-        const peripheral_id = entry.key_ptr.*;
+    for (peripheral_set.keys()) |peripheral_id| {
         const offset = db.attrs.offset.get(peripheral_id) orelse return error.MissingPeripheralInstanceOffset;
         try list.append(.{ .id = peripheral_id, .offset = offset });
     }
@@ -280,9 +276,7 @@ fn write_peripheral_instance(db: Database, instance_id: EntityId, offset: u64, o
 // rendered in the `types` namespace they need a name
 fn has_top_level_named_types(db: Database) bool {
     inline for (@typeInfo(@TypeOf(db.types)).Struct.fields) |field| {
-        var it = @field(db.types, field.name).iterator();
-        while (it.next()) |entry| {
-            const id = entry.key_ptr.*;
+        for (@field(db.types, field.name).keys()) |id| {
             if (!db.attrs.parent.contains(id) and
                 db.attrs.name.contains(id))
             {
@@ -307,10 +301,7 @@ fn write_types(db: Database, writer: anytype) !void {
     if (db.types.peripherals.count() > 0) {
         try writer.writeAll("pub const peripherals = struct {\n");

-        // TODO: order the peripherals alphabetically?
-        var it = db.types.peripherals.iterator();
-        while (it.next()) |entry| {
-            const peripheral_id = entry.key_ptr.*;
+        for (db.types.peripherals.keys()) |peripheral_id| {
             write_peripheral(db, peripheral_id, writer) catch |err| {
                 log.warn("failed to generate peripheral '{s}': {}", .{
                     db.attrs.name.get(peripheral_id) orelse "<unknown>",
@@ -335,9 +326,7 @@ fn is_peripheral_zero_sized(db: Database, peripheral_id: EntityId) bool {
     }

     return if (db.children.register_groups.get(peripheral_id)) |register_group_set| blk: {
-        var it = register_group_set.iterator();
-        while (it.next()) |entry| {
-            const register_group_id = entry.key_ptr.*;
+        for (register_group_set.keys()) |register_group_id| {
             if (db.attrs.offset.contains(register_group_id))
                 break :blk false;
         }
@@ -360,9 +349,8 @@ fn write_peripheral(
     // for now only serialize flat peripherals with no register groups
     // TODO: expand this
     if (db.children.register_groups.get(peripheral_id)) |register_group_set| {
-        var it = register_group_set.iterator();
-        while (it.next()) |entry| {
-            if (db.attrs.offset.contains(entry.key_ptr.*)) {
+        for (register_group_set.keys()) |register_group_id| {
+            if (db.attrs.offset.contains(register_group_id)) {
                 log.warn("TODO: implement register groups with offset in peripheral type ({s})", .{name});
                 return;
             }
@@ -402,10 +390,7 @@ fn write_peripheral(
     // namespaced registers
     if (db.children.register_groups.get(peripheral_id)) |register_group_set| {
-        var it = register_group_set.iterator();
-        while (it.next()) |entry| {
-            const register_group_id = entry.key_ptr.*;
+        for (register_group_set.keys()) |register_group_id| {
             // a register group with an offset means that it has a location within the peripheral
             if (db.attrs.offset.contains(register_group_id))
                 continue;
@@ -432,10 +417,7 @@ fn write_newline_if_written(writer: anytype, written: *bool) !void {
 }

 fn write_enums(db: Database, written: *bool, enum_set: EntitySet, writer: anytype) !void {
-    var it = enum_set.iterator();
-    while (it.next()) |entry| {
-        const enum_id = entry.key_ptr.*;
+    for (enum_set.keys()) |enum_id| {
         try write_newline_if_written(writer, written);
         try write_enum(db, enum_id, writer);
     }
@@ -472,11 +454,8 @@ fn write_enum_fields(db: Database, enum_id: u32, out_writer: anytype) !void {
     const writer = buffer.writer();
     const size = db.attrs.size.get(enum_id) orelse return error.MissingEnumSize;
     const field_set = db.children.enum_fields.get(enum_id) orelse return error.MissingEnumFields;
-    var it = field_set.iterator();
-    while (it.next()) |entry| {
-        const enum_field_id = entry.key_ptr.*;
+    for (field_set.keys()) |enum_field_id|
         try write_enum_field(db, enum_field_id, size, writer);
-    }

     // if the enum doesn't completely fill the integer then make it a non-exhaustive enum
     if (field_set.count() < std.math.pow(u64, 2, size))
@@ -513,9 +492,7 @@ fn write_mode_enum_and_fn(
     const writer = buffer.writer();
     try writer.writeAll("pub const Mode = enum {\n");

-    var it = mode_set.iterator();
-    while (it.next()) |entry| {
-        const mode_id = entry.key_ptr.*;
+    for (mode_set.keys()) |mode_id| {
         const mode_name = db.attrs.name.get(mode_id) orelse unreachable;
         try writer.print("{s},\n", .{std.zig.fmtId(mode_name)});
     }
@@ -527,9 +504,7 @@ fn write_mode_enum_and_fn(
         \\
     );

-    it = mode_set.iterator();
-    while (it.next()) |entry| {
-        const mode_id = entry.key_ptr.*;
+    for (mode_set.keys()) |mode_id| {
         const mode_name = db.attrs.name.get(mode_id) orelse unreachable;

         var components = std.ArrayList([]const u8).init(db.gpa);
@@ -590,18 +565,14 @@ fn write_registers_with_modes(
     defer buffer.deinit();
     const writer = buffer.writer();

-    var it = mode_set.iterator();
-    while (it.next()) |entry| {
-        const mode_id = entry.key_ptr.*;
+    for (mode_set.keys()) |mode_id| {
         const mode_name = db.attrs.name.get(mode_id) orelse unreachable;

         // filter registers for this mode
         var moded_registers = std.ArrayList(EntityWithOffset).init(allocator);
         for (registers.items) |register| {
             if (db.attrs.modes.get(register.id)) |reg_mode_set| {
-                var reg_mode_it = reg_mode_set.iterator();
-                while (reg_mode_it.next()) |reg_mode_entry| {
-                    const reg_mode_id = reg_mode_entry.key_ptr.*;
+                for (reg_mode_set.keys()) |reg_mode_id| {
                     if (reg_mode_id == mode_id)
                         try moded_registers.append(register);
                 }
@@ -720,14 +691,11 @@ fn write_register(
     var fields = std.ArrayList(EntityWithOffset).init(db.gpa);
     defer fields.deinit();

-    var it = field_set.iterator();
-    while (it.next()) |entry| {
-        const field_id = entry.key_ptr.*;
+    for (field_set.keys()) |field_id|
         try fields.append(.{
             .id = field_id,
             .offset = db.attrs.offset.get(field_id) orelse continue,
         });
-    }

     std.sort.sort(EntityWithOffset, fields.items, {}, EntityWithOffset.less_than);
     try writer.print("{s}: {s}mmio.Mmio(packed struct(u{}) {{\n", .{
@@ -885,9 +853,7 @@ fn get_ordered_register_list(

     // get list of registers
     if (db.children.registers.get(parent_id)) |register_set| {
-        var it = register_set.iterator();
-        while (it.next()) |entry| {
-            const register_id = entry.key_ptr.*;
+        for (register_set.keys()) |register_id| {
             const offset = db.attrs.offset.get(register_id) orelse continue;
             try registers.append(.{ .id = register_id, .offset = offset });
         }

@@ -159,10 +159,7 @@ pub fn load_into_db(db: *Database, text: []const u8) !void {
 fn resolve_enums(ctx: *LoadContext) !void {
     const db = ctx.db;

-    var it = ctx.enum_refs.iterator();
-    while (it.next()) |entry| {
-        const id = entry.key_ptr.*;
-        const ref = entry.value_ptr.*;
+    for (ctx.enum_refs.keys(), ctx.enum_refs.values()) |id, ref| {
         const enum_id = try ref_to_id(db.*, ref);
         //assert(db.entityIs("type.enum", enum_id));
         try ctx.db.attrs.@"enum".put(db.gpa, id, enum_id);
@@ -194,9 +191,7 @@ fn ref_to_id(db: Database, ref: []const u8) !EntityId {
     tmp_id = tmp_id: inline for (@typeInfo(TypeOfField(Database, "types")).Struct.fields) |field| {
         const other_type = try string_to_entity_type(field.name);
         if (entity_type == other_type) {
-            var entity_it = @field(db.types, field.name).iterator();
-            while (entity_it.next()) |entry| {
-                const id = entry.key_ptr.*;
+            for (@field(db.types, field.name).keys()) |id| {
                 if (db.attrs.parent.contains(id))
                     continue;
@@ -212,9 +207,7 @@ fn ref_to_id(db: Database, ref: []const u8) !EntityId {
             const other_type = try string_to_entity_type(field.name);
             if (entity_type == other_type) {
                 if (@field(db.children, field.name).get(tmp_id.?)) |children| {
-                    var child_it = children.iterator();
-                    while (child_it.next()) |child_entry| {
-                        const child_id = child_entry.key_ptr.*;
+                    for (children.keys()) |child_id| {
                         if (db.attrs.name.get(child_id)) |other_name| {
                             if (std.mem.eql(u8, name, other_name))
                                 break :tmp_id child_id;
@@ -240,12 +233,8 @@ fn load_types(ctx: *LoadContext, types: json.ObjectMap) !void {
 }

 fn load_peripherals(ctx: *LoadContext, peripherals: json.ObjectMap) !void {
-    var it = peripherals.iterator();
-    while (it.next()) |entry| {
-        const name = entry.key_ptr.*;
-        const peripheral = entry.value_ptr.*;
+    for (peripherals.keys(), peripherals.values()) |name, peripheral|
         try load_peripheral(ctx, name, try get_object(peripheral));
-    }
 }

 fn load_peripheral(
@@ -304,12 +293,8 @@ fn load_entities(comptime load_fn: LoadFn) LoadMultipleFn {
             parent_id: EntityId,
             entities: json.ObjectMap,
         ) LoadError!void {
-            var it = entities.iterator();
-            while (it.next()) |entry| {
-                const name = entry.key_ptr.*;
-                const entity = entry.value_ptr.*;
+            for (entities.keys(), entities.values()) |name, entity|
                 try load_fn(ctx, parent_id, name, try get_object(entity));
-            }
         }
     }.tmp;
 }
@@ -333,11 +318,7 @@ fn load_children(
     parent_id: EntityId,
     children: json.ObjectMap,
 ) LoadError!void {
-    var it = children.iterator();
-    while (it.next()) |entry| {
-        const child_type = entry.key_ptr.*;
-        const child_map = entry.value_ptr.*;
+    for (children.keys(), children.values()) |child_type, child_map| {
         inline for (@typeInfo(TypeOfField(Database, "children")).Struct.fields) |field| {
             if (std.mem.eql(u8, child_type, field.name)) {
                 if (@hasDecl(load_fns, field.name))
@@ -526,12 +507,8 @@ fn load_enum_field(
 }

 fn load_devices(ctx: *LoadContext, devices: json.ObjectMap) !void {
-    var it = devices.iterator();
-    while (it.next()) |entry| {
-        const name = entry.key_ptr.*;
-        const device = entry.value_ptr.*;
+    for (devices.keys(), devices.values()) |name, device|
         try load_device(ctx, name, try get_object(device));
-    }
 }

 fn load_device(ctx: *LoadContext, name: []const u8, device: json.ObjectMap) !void {
@@ -559,10 +536,8 @@ fn load_device(ctx: *LoadContext, name: []const u8, device: json.ObjectMap) !voi
 fn load_properties(ctx: *LoadContext, device_id: EntityId, properties: json.ObjectMap) !void {
     const db = ctx.db;

-    var it = properties.iterator();
-    while (it.next()) |entry| {
-        const key = entry.key_ptr.*;
-        const value = switch (entry.value_ptr.*) {
+    for (properties.keys(), properties.values()) |key, json_value| {
+        const value = switch (json_value) {
             .String => |str| str,
             else => return error.InvalidJsonType,
         };
@@ -613,14 +588,8 @@ pub fn to_json(db: Database) !json.ValueTree {
     var types = json.ObjectMap.init(allocator);
     var devices = json.ObjectMap.init(allocator);

-    var device_it = db.instances.devices.iterator();
-    while (device_it.next()) |entry|
-        try populate_device(
-            db,
-            arena,
-            &devices,
-            entry.key_ptr.*,
-        );
+    for (db.instances.devices.keys()) |device_id|
+        try populate_device(db, arena, &devices, device_id);

     try root.put("version", .{ .String = schema_version });
     try populate_types(db, arena, &types);
@@ -643,12 +612,11 @@ fn populate_types(
 ) !void {
     const allocator = arena.allocator();
     var peripherals = json.ObjectMap.init(allocator);
-    var it = db.types.peripherals.iterator();
-    while (it.next()) |entry| {
-        const periph_id = entry.key_ptr.*;
-        const name = db.attrs.name.get(periph_id) orelse continue;
+
+    for (db.types.peripherals.keys()) |peripheral_id| {
+        const name = db.attrs.name.get(peripheral_id) orelse continue;
         var typ = json.ObjectMap.init(allocator);
-        try populate_type(db, arena, periph_id, &typ);
+        try populate_type(db, arena, peripheral_id, &typ);
         try peripherals.put(name, .{ .Object = typ });
     }
@@ -707,9 +675,7 @@ fn populate_type(
     if (db.attrs.modes.get(id)) |modeset| {
         var modearray = json.Array.init(allocator);

-        var it = modeset.iterator();
-        while (it.next()) |entry| {
-            const mode_id = entry.key_ptr.*;
+        for (modeset.keys()) |mode_id| {
             if (db.attrs.name.contains(mode_id)) {
                 const ref = try id_to_ref(
                     arena.allocator(),
@@ -737,9 +703,7 @@ fn populate_type(
         if (@field(db.children, field.name).get(id)) |set| {
             assert(set.count() > 0);

-            var it = set.iterator();
-            while (it.next()) |entry| {
-                const child_id = entry.key_ptr.*;
+            for (set.keys()) |child_id| {
                 const name = db.attrs.name.get(child_id) orelse continue;
                 var child_type = json.ObjectMap.init(allocator);
                 try populate_type(db, arena, child_id, &child_type);
@@ -780,15 +744,8 @@ fn populate_device(
     // TODO: link peripherals to device
     var peripherals = json.ObjectMap.init(allocator);

-    var periph_it = db.instances.peripherals.iterator();
-    while (periph_it.next()) |entry|
-        try populate_peripheral(
-            db,
-            arena,
-            &peripherals,
-            entry.key_ptr.*,
-            entry.value_ptr.*,
-        );
+    for (db.instances.peripherals.keys(), db.instances.peripherals.values()) |instance_id, type_id|
+        try populate_peripheral(db, arena, &peripherals, instance_id, type_id);

     const arch = db.instances.devices.get(id).?.arch;
     try device.put("arch", .{ .String = arch.to_string() });

@@ -165,11 +165,7 @@ pub fn load_into_db(db: *Database, doc: xml.Doc) !void {
         load_peripheral(&ctx, peripheral_node, device_id) catch |err|
             log.warn("failed to load peripheral: {}", .{err});

-    var derive_it = ctx.derived_entities.iterator();
-    while (derive_it.next()) |derived_entry| {
-        const id = derived_entry.key_ptr.*;
-        const derived_name = derived_entry.value_ptr.*;
+    for (ctx.derived_entities.keys(), ctx.derived_entities.values()) |id, derived_name| {
         derive_entity(ctx, id, derived_name) catch |err| {
             log.warn("failed to derive entity {} from {s}: {}", .{
                 id,
@@ -262,10 +258,7 @@ pub fn derive_entity(ctx: Context, id: EntityId, derived_name: []const u8) !void
     if (try db.instances.peripherals.fetchPut(db.gpa, id, base_id)) |entry| {
         const maybe_remove_peripheral_id = entry.value;

-        var it = db.instances.peripherals.iterator();
-        while (it.next()) |instance_entry| {
-            const used_peripheral_id = instance_entry.value_ptr.*;
+        for (db.instances.peripherals.keys()) |used_peripheral_id| {
             // if there is a match don't delete the entity
             if (used_peripheral_id == maybe_remove_peripheral_id)
                 break;

@@ -90,9 +90,7 @@ pub fn expect_equal_databases(
     expected: Database,
     actual: Database,
 ) !void {
-    var it = expected.types.peripherals.iterator();
-    while (it.next()) |entry| {
-        const peripheral_id = entry.key_ptr.*;
+    for (expected.types.peripherals.keys()) |peripheral_id| {
         const name = expected.attrs.name.get(peripheral_id) orelse unreachable;
         std.log.debug("peripheral: {s}", .{name});
         const expected_id = try expected.get_entity_id_by_name("type.peripheral", name);
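
Beyond the plain key/value loops, a couple of hunks (get_entity_id_by_name, and the register-group lookup in load_module_instance_from_register_group) also rely on `for` being usable as an expression: the loop either returns/breaks with a match or falls through to its `else` arm. A standalone sketch of that shape, with hypothetical names and data (findByName and the sample strings are not from the repo):

const std = @import("std");

fn findByName(names: []const []const u8, wanted: []const u8) error{NameNotFound}!usize {
    // the `for` is an expression: a `return` inside exits with the match,
    // and the `else` arm supplies the loop's value when nothing matched
    return for (names, 0..) |name, i| {
        if (std.mem.eql(u8, name, wanted))
            return i;
    } else error.NameNotFound;
}

test "for used as an expression" {
    const names = [_][]const u8{ "PORTA", "PORTB" };
    try std.testing.expectEqual(@as(usize, 1), try findByName(&names, "PORTB"));
    try std.testing.expectError(error.NameNotFound, findByName(&names, "TWI0"));
}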
