if (rogue_ref_is_reg(&mov->dst[0].ref) &&
mov->dst[0].ref.reg->class == ROGUE_REG_CLASS_VTXOUT) {
instr = &rogue_UVSW_WRITE(b, mov->dst[0].ref, mov->src[0].ref)->instr;
+ } else if (rogue_ref_is_special_reg(&mov->src[0].ref)) {
+ /* If we're loading a special register, use a movc. */
+ rogue_alu_instr *alu = rogue_MOVC(b,
+ mov->dst[0].ref,
+ rogue_ref_io(ROGUE_IO_NONE),
+ rogue_ref_io(ROGUE_IO_NONE),
+ mov->src[0].ref,
+ rogue_ref_io(ROGUE_IO_NONE));
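+ /* Set every destination write-enable (E0-E3) so the whole register is written. */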
+ rogue_set_alu_dst_mod(alu, 0, ROGUE_ALU_DST_MOD_E0);
+ rogue_set_alu_dst_mod(alu, 0, ROGUE_ALU_DST_MOD_E1);
+ rogue_set_alu_dst_mod(alu, 0, ROGUE_ALU_DST_MOD_E2);
+ rogue_set_alu_dst_mod(alu, 0, ROGUE_ALU_DST_MOD_E3);
+
+ instr = &alu->instr;
} else {
/* If we're moving an immediate value not in special constants,
* we need to do a bitwise bypass.
if (rogue_ref_is_io(ref) && rogue_ref_get_io(ref) == io)
return;
- /* Leave source feedthroughs in place. */
- if (!is_dst && rogue_io_is_ft(io))
- return;
-
if (alu == ROGUE_ALU_MAIN) {
/* Hook up feedthrough outputs to W0 using IS4. */
if (is_dst && rogue_io_is_ft(io)) {
}
}
+ /* Movc source. */
+ /* TODO: Hardcoded to use FTE and S1 for now. */
+ if (!is_dst && io == ROGUE_IO_FTE) {
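+ /* Select S1 into IS0 and route FTE through IS4; the source itself then lands in S1. */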
+ enum rogue_io src = ROGUE_IO_S1;
+ *(rogue_instr_group_io_sel_ref(map, ROGUE_IO_IS0)) = rogue_ref_io(src);
+ *(rogue_instr_group_io_sel_ref(map, ROGUE_IO_IS4)) = rogue_ref_io(io);
+ io = src;
+ }
+
/* Pack source */
if (!is_dst && io == ROGUE_IO_IS3) {
enum rogue_io src = ROGUE_IO_S0;
io = src;
}
- /* Movc sources. */
+ /* W0/W1 used as sources. */
if (!is_dst && rogue_io_is_dst(io)) {
enum rogue_io dst_ft =
(io == ROGUE_IO_W0 ? ROGUE_IO_IS4 : ROGUE_IO_IS5);
*regarray_cached = regarray;
regarray->cached = regarray_cached;
- assert(updated);
return updated;
}
return ref->type == ROGUE_REF_TYPE_REG;
}
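+/* Returns true if the reference is a register in the special register class. */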
+static inline bool rogue_ref_is_special_reg(const rogue_ref *ref)
+{
+ return rogue_ref_is_reg(ref) && ref->reg->class == ROGUE_REG_CLASS_SPECIAL;
+}
+
static inline bool rogue_ref_is_regarray(const rogue_ref *ref)
{
return ref->type == ROGUE_REF_TYPE_REGARRAY;
rogue_bitwise_instr_encoding bitwise;
} PACKED rogue_instr_encoding;
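+/* Map a movc source feedthrough reference to its MOVW_* encoding. */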
+static unsigned rogue_alu_movc_ft(const rogue_ref *ref)
+{
+ switch (rogue_ref_get_io(ref)) {
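+ /* An unset source defaults to FT0. */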
+ case ROGUE_IO_NONE:
+ case ROGUE_IO_FT0:
+ return MOVW_FT0;
+
+ case ROGUE_IO_FT1:
+ return MOVW_FT1;
+
+ case ROGUE_IO_FT2:
+ return MOVW_FT2;
+
+ case ROGUE_IO_FTE:
+ return MOVW_FTE;
+
+ default:
+ break;
+ }
+
+ unreachable("Invalid source.");
+}
+
#define SM(src_mod) ROGUE_ALU_SRC_MOD_##src_mod
#define DM(dst_mod) ROGUE_ALU_DST_MOD_##dst_mod
#define OM(op_mod) ROGUE_ALU_OP_MOD_##op_mod
bool e3 = rogue_alu_dst_mod_is_set(alu, 0, DM(E3));
bool e_none = !e0 && !e1 && !e2 && !e3;
- switch (rogue_ref_get_io(&alu->src[1].ref)) {
- case ROGUE_IO_FT0:
- instr_encoding->alu.movc.movw0 = MOVW_FT0;
- break;
- case ROGUE_IO_FT1:
- instr_encoding->alu.movc.movw0 = MOVW_FT1;
- break;
- case ROGUE_IO_FT2:
- instr_encoding->alu.movc.movw0 = MOVW_FT2;
- break;
- case ROGUE_IO_FTE:
- instr_encoding->alu.movc.movw0 = MOVW_FTE;
- break;
- default:
- unreachable("Invalid source.");
- }
-
- switch (rogue_ref_get_io(&alu->src[2].ref)) {
- case ROGUE_IO_FT0:
- instr_encoding->alu.movc.movw1 = MOVW_FT0;
- break;
- case ROGUE_IO_FT1:
- instr_encoding->alu.movc.movw1 = MOVW_FT1;
- break;
- case ROGUE_IO_FT2:
- instr_encoding->alu.movc.movw1 = MOVW_FT2;
- break;
- case ROGUE_IO_FTE:
- instr_encoding->alu.movc.movw1 = MOVW_FTE;
- break;
- default:
- unreachable("Invalid source.");
- }
+ instr_encoding->alu.movc.movw0 = rogue_alu_movc_ft(&alu->src[1].ref);
+ instr_encoding->alu.movc.movw1 = rogue_alu_movc_ft(&alu->src[2].ref);
if (instr_size == 2) {
instr_encoding->alu.movc.ext = 1;
if (e_none) {
instr_encoding->alu.movc.maskw0 = MASKW0_EALL;
} else {
- if (e0)
- instr_encoding->alu.movc.maskw0 |= MASKW0_E0;
- if (e1)
- instr_encoding->alu.movc.maskw0 |= MASKW0_E1;
- if (e2)
- instr_encoding->alu.movc.maskw0 |= MASKW0_E2;
- if (e3)
- instr_encoding->alu.movc.maskw0 |= MASKW0_E3;
+ instr_encoding->alu.movc.maskw0 |= e0 ? MASKW0_E0 : 0;
+ instr_encoding->alu.movc.maskw0 |= e1 ? MASKW0_E1 : 0;
+ instr_encoding->alu.movc.maskw0 |= e2 ? MASKW0_E2 : 0;
+ instr_encoding->alu.movc.maskw0 |= e3 ? MASKW0_E3 : 0;
}
}
break;
[1] = T(REG) | T(IO),
},
},
+ /* TODO: Support fully. */
[ROGUE_ALU_OP_MOVC] = { .str = "movc", .num_dsts = 2, .num_srcs = 3,
.supported_phases = P(2_MOV),
- .phase_io[PH(2_MOV)] = { .dst[0] = IO(W0), .dst[1] = IO(W1), .src[1] = IO(FT0), .src[2] = IO(FTE), }, /* TODO: SRC ONES ARE TEMPORARY, SHOULD BE MADE TO MATCH THE TST ONES INSTEAD? */
+ .phase_io[PH(2_MOV)] = { .dst[0] = IO(W0), .src[1] = IO(FTE), },
.supported_dst_mods = {
[0] = DM(E0) | DM(E1) | DM(E2) | DM(E3),
},
- .supported_dst_types = { [0] = T(REG), [1] = T(REG) | T(IO), },
+ .supported_dst_types = { [0] = T(REG) | T(REGARRAY), [1] = T(REG) | T(REGARRAY) | T(IO), },
.supported_src_types = {
[0] = T(IO),
- [1] = T(REG) | T(IO),
- [2] = T(REG) | T(IO),
+ [1] = T(REG) | T(REGARRAY) | T(IO),
+ [2] = T(REG) | T(REGARRAY) | T(IO),
},
},
[ROGUE_ALU_OP_ADD64] = { .str = "add64", .num_dsts = 3, .num_srcs = 5,