// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/torque/cc-generator.h"

#include "src/common/globals.h"
#include "src/torque/global-context.h"
#include "src/torque/type-oracle.h"
#include "src/torque/types.h"
#include "src/torque/utils.h"

namespace v8 {
namespace internal {
namespace torque {

base::Optional<Stack<std::string>> CCGenerator::EmitGraph(
    Stack<std::string> parameters) {
  for (BottomOffset i = {0}; i < parameters.AboveTop(); ++i) {
    SetDefinitionVariable(DefinitionLocation::Parameter(i.offset),
                          parameters.Peek(i));
  }

  // Redirect the output of non-declarations into a buffer and only output
  // declarations right away.
  std::stringstream out_buffer;
  std::ostream* old_out = out_;
  out_ = &out_buffer;
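  // With out_ redirected, everything written via out() below is buffered
  // until the end of EmitGraph, while declarations emitted along the way
  // (e.g. the phi variables declared in EmitBlock via decls()) end up ahead
  // of the buffered gotos and labels in the final output.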

  EmitInstruction(GotoInstruction{cfg_.start()}, &parameters);

  for (Block* block : cfg_.blocks()) {
    if (cfg_.end() && *cfg_.end() == block) continue;
    if (block->IsDead()) continue;
    EmitBlock(block);
  }

  base::Optional<Stack<std::string>> result;
  if (cfg_.end()) {
    result = EmitBlock(*cfg_.end());
  }

  // All declarations have been printed now, so we can append the buffered
  // output and redirect back to the original output stream.
  out_ = old_out;
  out() << out_buffer.str();

  return result;
}

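// Emits one labeled block. The emitted code has roughly the shape (variable
// names here are illustrative, not the exact DefinitionToVariable spelling):
//
//   block_label:
//   ... emitted instructions ...
//
// with every phi input of the block declared up front via decls() as
//   SomeType phi_var{}; USE(phi_var);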
Stack<std::string> CCGenerator::EmitBlock(const Block* block) {
  out() << "\n";
  out() << " " << BlockName(block) << ":\n";

  Stack<std::string> stack;

  for (BottomOffset i = {0}; i < block->InputTypes().AboveTop(); ++i) {
    const auto& def = block->InputDefinitions().Peek(i);
    stack.Push(DefinitionToVariable(def));
    if (def.IsPhiFromBlock(block)) {
      decls() << " "
              << (is_cc_debug_ ? block->InputTypes().Peek(i)->GetDebugType()
                               : block->InputTypes().Peek(i)->GetRuntimeType())
              << " " << stack.Top() << "{}; USE(" << stack.Top() << ");\n";
    }
  }

  for (const Instruction& instruction : block->instructions()) {
    TorqueCodeGenerator::EmitInstruction(instruction, &stack);
  }
  return stack;
}

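// Emits a "// <file>:<line>" comment whenever the source position changes
// (or unconditionally when always_emit is set), translating Torque's
// zero-based line numbers to the one-based convention used downstream.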
void CCGenerator::EmitSourcePosition(SourcePosition pos, bool always_emit) {
  const std::string& file = SourceFileMap::AbsolutePath(pos.source);
  if (always_emit || !previous_position_.CompareStartIgnoreColumn(pos)) {
    // Lines in Torque SourcePositions are zero-based, while the
    // CodeStubAssembler and downwind systems are one-based.
    out() << " // " << file << ":" << (pos.start.line + 1) << "\n";
    previous_position_ = pos;
  }
}

void CCGenerator::EmitInstruction(
    const PushUninitializedInstruction& instruction,
    Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: PushUninitialized");
}

void CCGenerator::EmitInstruction(
    const PushBuiltinPointerInstruction& instruction,
    Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: PushBuiltinPointer");
}

void CCGenerator::EmitInstruction(
    const NamespaceConstantInstruction& instruction,
    Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: NamespaceConstantInstruction");
}

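// Converts the arguments of a call into C++ expression strings: constexpr
// parameters are taken from constexpr_arguments, everything else is popped
// from the stack and rendered with EmitCCValue. The walk happens in reverse
// parameter order and the result is reversed again, so the returned vector
// is in declaration order.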
std::vector<std::string> CCGenerator::ProcessArgumentsCommon(
    const TypeVector& parameter_types,
    std::vector<std::string> constexpr_arguments, Stack<std::string>* stack) {
  std::vector<std::string> args;
  for (auto it = parameter_types.rbegin(); it != parameter_types.rend();
       ++it) {
    const Type* type = *it;
    if (type->IsConstexpr()) {
      args.push_back(std::move(constexpr_arguments.back()));
      constexpr_arguments.pop_back();
    } else {
      std::stringstream s;
      size_t slot_count = LoweredSlotCount(type);
      VisitResult arg = VisitResult(type, stack->TopRange(slot_count));
      EmitCCValue(arg, *stack, s);
      args.push_back(s.str());
      stack->PopMany(slot_count);
    }
  }
  std::reverse(args.begin(), args.end());
  return args;
}

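// Only a small set of intrinsics is supported in C++ output:
//   %RawDownCast   -> at most a static_cast to the target runtime type,
//   %FromConstexpr -> a (possibly Smi-wrapping) cast of the constexpr value,
//   %GetClassMapConstant and everything else -> a Torque error.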
void CCGenerator::EmitInstruction(const CallIntrinsicInstruction& instruction,
                                  Stack<std::string>* stack) {
  TypeVector parameter_types =
      instruction.intrinsic->signature().parameter_types.types;
  std::vector<std::string> args = ProcessArgumentsCommon(
      parameter_types, instruction.constexpr_arguments, stack);

  Stack<std::string> pre_call_stack = *stack;
  const Type* return_type = instruction.intrinsic->signature().return_type;
  std::vector<std::string> results;

  const auto lowered = LowerType(return_type);
  for (std::size_t i = 0; i < lowered.size(); ++i) {
    results.push_back(DefinitionToVariable(instruction.GetValueDefinition(i)));
    stack->Push(results.back());
    decls() << " "
            << (is_cc_debug_ ? lowered[i]->GetDebugType()
                             : lowered[i]->GetRuntimeType())
            << " " << stack->Top() << "{}; USE(" << stack->Top() << ");\n";
  }

  out() << " ";
  if (return_type->StructSupertype()) {
    out() << "std::tie(";
    PrintCommaSeparatedList(out(), results);
    out() << ") = ";
  } else {
    if (results.size() == 1) {
      out() << results[0] << " = ";
    }
  }

  if (instruction.intrinsic->ExternalName() == "%RawDownCast") {
    if (parameter_types.size() != 1) {
      ReportError("%RawDownCast must take a single parameter");
    }
    const Type* original_type = parameter_types[0];
    bool is_subtype =
        return_type->IsSubtypeOf(original_type) ||
        (original_type == TypeOracle::GetUninitializedHeapObjectType() &&
         return_type->IsSubtypeOf(TypeOracle::GetHeapObjectType()));
    if (!is_subtype) {
      ReportError("%RawDownCast error: ", *return_type,
                  " is not a subtype of ", *original_type);
    }
    if (!original_type->StructSupertype() &&
        return_type->GetRuntimeType() != original_type->GetRuntimeType()) {
      out() << "static_cast<" << return_type->GetRuntimeType() << ">";
    }
  } else if (instruction.intrinsic->ExternalName() == "%GetClassMapConstant") {
    ReportError("C++ generator doesn't yet support %GetClassMapConstant");
  } else if (instruction.intrinsic->ExternalName() == "%FromConstexpr") {
    if (parameter_types.size() != 1 || !parameter_types[0]->IsConstexpr()) {
      ReportError(
          "%FromConstexpr must take a single parameter with constexpr "
          "type");
    }
    if (return_type->IsConstexpr()) {
      ReportError("%FromConstexpr must return a non-constexpr type");
    }
    if (return_type->IsSubtypeOf(TypeOracle::GetSmiType())) {
      if (is_cc_debug_) {
        out() << "Internals::IntToSmi";
      } else {
        out() << "Smi::FromInt";
      }
    }
    // Wrap the raw constexpr value in a static_cast to ensure that
    // enums get properly cast to their backing integral value.
    out() << "(CastToUnderlyingTypeIfEnum";
  } else {
    ReportError("no built in intrinsic with name " +
                instruction.intrinsic->ExternalName());
  }

  out() << "(";
  PrintCommaSeparatedList(out(), args);
  if (instruction.intrinsic->ExternalName() == "%FromConstexpr") {
    out() << ")";
  }
  out() << ");\n";
}

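// Emits a plain C++ call to the macro's generated C++ function. In regular
// mode this looks roughly like
//   result = MacroCCName(args...);
// while in cc-debug mode the call goes through
//   ASSIGN_OR_RETURN(result, MacroCCDebugName(accessor, args...));
// Multi-value (struct) results are unpacked with std::tie.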
void CCGenerator::EmitInstruction(const CallCsaMacroInstruction& instruction,
                                  Stack<std::string>* stack) {
  TypeVector parameter_types =
      instruction.macro->signature().parameter_types.types;
  std::vector<std::string> args = ProcessArgumentsCommon(
      parameter_types, instruction.constexpr_arguments, stack);

  Stack<std::string> pre_call_stack = *stack;
  const Type* return_type = instruction.macro->signature().return_type;
  std::vector<std::string> results;

  const auto lowered = LowerType(return_type);
  for (std::size_t i = 0; i < lowered.size(); ++i) {
    results.push_back(DefinitionToVariable(instruction.GetValueDefinition(i)));
    stack->Push(results.back());
    decls() << " "
            << (is_cc_debug_ ? lowered[i]->GetDebugType()
                             : lowered[i]->GetRuntimeType())
            << " " << stack->Top() << "{}; USE(" << stack->Top() << ");\n";
  }

  // We should have inlined any calls requiring complex control flow.
  CHECK(!instruction.catch_block);
  out() << (is_cc_debug_ ? " ASSIGN_OR_RETURN(" : " ");
  if (return_type->StructSupertype().has_value()) {
    out() << "std::tie(";
    PrintCommaSeparatedList(out(), results);
    out() << (is_cc_debug_ ? "), " : ") = ");
  } else {
    if (results.size() == 1) {
      out() << results[0] << (is_cc_debug_ ? ", " : " = ");
    } else {
      DCHECK_EQ(0, results.size());
    }
  }

  if (is_cc_debug_) {
    out() << instruction.macro->CCDebugName() << "(accessor";
    if (!args.empty()) out() << ", ";
  } else {
    out() << instruction.macro->CCName() << "(";
  }
  PrintCommaSeparatedList(out(), args);
  if (is_cc_debug_) {
    out() << "));\n";
  } else {
    out() << ");\n";
  }
}

void CCGenerator::EmitInstruction(
    const CallCsaMacroAndBranchInstruction& instruction,
    Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: CallCsaMacroAndBranch");
}

void CCGenerator::EmitInstruction(const MakeLazyNodeInstruction& instruction,
                                  Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: MakeLazyNode");
}

void CCGenerator::EmitInstruction(const CallBuiltinInstruction& instruction,
                                  Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: CallBuiltin");
}

void CCGenerator::EmitInstruction(
    const CallBuiltinPointerInstruction& instruction,
    Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: CallBuiltinPointer");
}

void CCGenerator::EmitInstruction(const CallRuntimeInstruction& instruction,
                                  Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: CallRuntime");
}

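// Both branch forms are lowered to a plain C++ if/else whose arms contain
// only the phi assignments and the goto emitted by EmitGoto below.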
void CCGenerator::EmitInstruction(const BranchInstruction& instruction,
                                  Stack<std::string>* stack) {
  out() << " if (" << stack->Pop() << ") {\n";
  EmitGoto(instruction.if_true, stack, " ");
  out() << " } else {\n";
  EmitGoto(instruction.if_false, stack, " ");
  out() << " }\n";
}

void CCGenerator::EmitInstruction(const ConstexprBranchInstruction& instruction,
                                  Stack<std::string>* stack) {
  out() << " if ((" << instruction.condition << ")) {\n";
  EmitGoto(instruction.if_true, stack, " ");
  out() << " } else {\n";
  EmitGoto(instruction.if_false, stack, " ");
  out() << " }\n";
}

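// Materializes the destination block's phi inputs as assignments from the
// current stack values, then jumps, e.g. (illustrative names):
//   phi_var = current_value;
//   goto destination_label;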
void CCGenerator::EmitGoto(const Block* destination, Stack<std::string>* stack,
                           std::string indentation) {
  const auto& destination_definitions = destination->InputDefinitions();
  DCHECK_EQ(stack->Size(), destination_definitions.Size());
  for (BottomOffset i = {0}; i < stack->AboveTop(); ++i) {
    DefinitionLocation def = destination_definitions.Peek(i);
    if (def.IsPhiFromBlock(destination)) {
      out() << indentation << DefinitionToVariable(def) << " = "
            << stack->Peek(i) << ";\n";
    }
  }
  out() << indentation << "goto " << BlockName(destination) << ";\n";
}

void CCGenerator::EmitInstruction(const GotoInstruction& instruction,
                                  Stack<std::string>* stack) {
  EmitGoto(instruction.destination, stack, " ");
}

void CCGenerator::EmitInstruction(const GotoExternalInstruction& instruction,
                                  Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: GotoExternal");
}

void CCGenerator::EmitInstruction(const ReturnInstruction& instruction,
                                  Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: Return");
}

void CCGenerator::EmitInstruction(
    const PrintConstantStringInstruction& instruction,
    Stack<std::string>* stack) {
  out() << " std::cout << " << StringLiteralQuote(instruction.message)
        << ";\n";
}

void CCGenerator::EmitInstruction(const AbortInstruction& instruction,
                                  Stack<std::string>* stack) {
  switch (instruction.kind) {
    case AbortInstruction::Kind::kUnreachable:
      DCHECK(instruction.message.empty());
      out() << " UNREACHABLE();\n";
      break;
    case AbortInstruction::Kind::kDebugBreak:
      DCHECK(instruction.message.empty());
      out() << " base::OS::DebugBreak();\n";
      break;
    case AbortInstruction::Kind::kAssertionFailure: {
      std::string file = StringLiteralQuote(
          SourceFileMap::PathFromV8Root(instruction.pos.source));
      out() << " CHECK(false, \"Failed Torque assertion: '\""
            << StringLiteralQuote(instruction.message) << "\"' at \"" << file
            << "\":\""
            << StringLiteralQuote(
                   std::to_string(instruction.pos.start.line + 1))
            << ");\n";
      break;
    }
  }
}

void CCGenerator::EmitInstruction(const UnsafeCastInstruction& instruction,
                                  Stack<std::string>* stack) {
  const std::string str = "static_cast<" +
                          instruction.destination_type->GetRuntimeType() +
                          ">(" + stack->Top() + ")";
  stack->Poke(stack->AboveTop() - 1, str);
  SetDefinitionVariable(instruction.GetValueDefinition(), str);
}

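// Loads a field given an (object, offset) pair from the stack. In regular
// mode, tagged fields (currently restricted to Smi values) go through
// TaggedField<T>::load and untagged fields through ReadField<T>; in cc-debug
// mode the corresponding READ_TAGGED_FIELD_OR_FAIL / READ_FIELD_OR_FAIL
// macros are emitted instead.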
void CCGenerator::EmitInstruction(const LoadReferenceInstruction& instruction,
                                  Stack<std::string>* stack) {
  std::string result_name =
      DefinitionToVariable(instruction.GetValueDefinition());

  std::string offset = stack->Pop();
  std::string object = stack->Pop();
  stack->Push(result_name);

  if (!is_cc_debug_) {
    std::string result_type = instruction.type->GetRuntimeType();
    decls() << " " << result_type << " " << result_name << "{}; USE("
            << result_name << ");\n";
    out() << " " << result_name << " = ";
    if (instruction.type->IsSubtypeOf(TypeOracle::GetTaggedType())) {
      // Currently, all of the tagged loads we emit are for smi values, so
      // there is no point in providing a PtrComprCageBase. If at some point
      // we start emitting loads for tagged fields which might be HeapObjects,
      // then we should plumb a PtrComprCageBase through the generated
      // functions that need it.
      if (!instruction.type->IsSubtypeOf(TypeOracle::GetSmiType())) {
        Error(
            "Not supported in C++ output: LoadReference on non-smi tagged "
            "value");
      }

      // References and slices can cause some values to have the Torque type
      // HeapObject|TaggedZeroPattern, which is output as "Object". TaggedField
      // requires HeapObject, so we need a cast.
      out() << "TaggedField<" << result_type
            << ">::load(*static_cast<HeapObject*>(&" << object
            << "), static_cast<int>(" << offset << "));\n";
    } else {
      out() << "(" << object << ").ReadField<" << result_type << ">(" << offset
            << ");\n";
    }
  } else {
    std::string result_type = instruction.type->GetDebugType();
    decls() << " " << result_type << " " << result_name << "{}; USE("
            << result_name << ");\n";
    if (instruction.type->IsSubtypeOf(TypeOracle::GetTaggedType())) {
      out() << " READ_TAGGED_FIELD_OR_FAIL(" << result_name << ", accessor, "
            << object << ", static_cast<int>(" << offset << "));\n";
    } else {
      out() << " READ_FIELD_OR_FAIL(" << result_type << ", " << result_name
            << ", accessor, " << object << ", " << offset << ");\n";
    }
  }
}

void CCGenerator::EmitInstruction(const StoreReferenceInstruction& instruction,
                                  Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: StoreReference");
}

namespace {
std::string GetBitFieldSpecialization(const Type* container,
                                      const BitField& field) {
  std::stringstream stream;
  stream << "base::BitField<"
         << field.name_and_type.type->GetConstexprGeneratedTypeName() << ", "
         << field.offset << ", " << field.num_bits << ", "
         << container->GetConstexprGeneratedTypeName() << ">";
  return stream.str();
}
}  // namespace

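// Decodes one bit field out of a bit-field struct on the stack. The emitted
// code has roughly the form
//   result = CastToUnderlyingTypeIfEnum(
//       base::BitField<FieldType, offset, num_bits, StructType>::decode(s));
// where a SmiTagged bit-field struct is first untagged via ".value()" (or
// Internals::SmiValue in cc-debug mode).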
void CCGenerator::EmitInstruction(const LoadBitFieldInstruction& instruction,
                                  Stack<std::string>* stack) {
  std::string result_name =
      DefinitionToVariable(instruction.GetValueDefinition());

  std::string bit_field_struct = stack->Pop();
  stack->Push(result_name);

  const Type* struct_type = instruction.bit_field_struct_type;

  decls() << " " << instruction.bit_field.name_and_type.type->GetRuntimeType()
          << " " << result_name << "{}; USE(" << result_name << ");\n";

  base::Optional<const Type*> smi_tagged_type =
      Type::MatchUnaryGeneric(struct_type, TypeOracle::GetSmiTaggedGeneric());
  if (smi_tagged_type) {
    // Get the untagged value and its type.
    if (is_cc_debug_) {
      bit_field_struct = "Internals::SmiValue(" + bit_field_struct + ")";
    } else {
      bit_field_struct = bit_field_struct + ".value()";
    }
    struct_type = *smi_tagged_type;
  }

  out() << " " << result_name << " = CastToUnderlyingTypeIfEnum("
        << GetBitFieldSpecialization(struct_type, instruction.bit_field)
        << "::decode(" << bit_field_struct << "));\n";
}

void CCGenerator::EmitInstruction(const StoreBitFieldInstruction& instruction,
                                  Stack<std::string>* stack) {
  ReportError("Not supported in C++ output: StoreBitField");
}

namespace {

void CollectAllFields(const VisitResult& result,
                      const Stack<std::string>& values,
                      std::vector<std::string>& all_fields) {
  if (!result.IsOnStack()) {
    all_fields.push_back(result.constexpr_value());
  } else if (auto struct_type = result.type()->StructSupertype()) {
    for (const Field& field : (*struct_type)->fields()) {
      CollectAllFields(ProjectStructField(result, field.name_and_type.name),
                       values, all_fields);
    }
  } else {
    DCHECK_EQ(1, result.stack_range().Size());
    all_fields.push_back(values.Peek(result.stack_range().begin()));
  }
}

}  // namespace

// static
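// Renders a VisitResult as a single C++ expression: a one-slot value prints
// as the corresponding variable (or constexpr value), while struct values
// are flattened field by field and, when more than one field results,
// wrapped in std::make_tuple(...).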
void CCGenerator::EmitCCValue(VisitResult result,
                              const Stack<std::string>& values,
                              std::ostream& out) {
  std::vector<std::string> all_fields;
  CollectAllFields(result, values, all_fields);
  if (all_fields.size() == 1) {
    out << all_fields[0];
  } else {
    out << "std::make_tuple(";
    PrintCommaSeparatedList(out, all_fields);
    out << ")";
  }
}

}  // namespace torque
}  // namespace internal
}  // namespace v8