/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkJumper_misc_DEFINED
#define SkJumper_misc_DEFINED

#include <string.h>  // for memcpy()

// Miscellany used by SkJumper_stages.cpp and SkJumper_stages_lowp.cpp.

// Every function in this file should be marked static and inline using SI.
#if defined(__clang__)
    #define SI __attribute__((always_inline)) static inline
#else
    #define SI static inline
#endif

template <typename T, typename P>
SI T unaligned_load(const P* p) {  // const void* would work too, but const P* helps ARMv7 codegen.
    T v;
    memcpy(&v, p, sizeof(v));
    return v;
}

template <typename T, typename P>
SI void unaligned_store(P* p, T v) {
    memcpy(p, &v, sizeof(v));
}
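//
// A usage sketch (not part of this header's API): round-tripping four packed
// bytes through an arbitrarily-aligned pointer.  The buffer and value names
// here are purely illustrative.
//
//     uint8_t rgba[4] = {1, 2, 3, 4};                    // may not be 4-byte aligned
//     uint32_t px = unaligned_load<uint32_t>(rgba);      // safe regardless of alignment
//     unaligned_store(rgba, px);                         // writes the same bytes back
//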

template <typename Dst, typename Src>
SI Dst bit_cast(const Src& src) {
    static_assert(sizeof(Dst) == sizeof(Src), "");
    return unaligned_load<Dst>(&src);
}

template <typename Dst, typename Src>
SI Dst widen_cast(const Src& src) {
    static_assert(sizeof(Dst) > sizeof(Src), "");
    Dst dst;
    memcpy(&dst, &src, sizeof(Src));
    return dst;
}
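//
// A sketch of how the two casts differ (illustrative only; the types below are
// stand-ins for whatever SkJumper_stages.cpp actually uses):
//
//     float    f = 1.0f;
//     uint32_t u = bit_cast<uint32_t>(f);   // same size: reinterpret the bits
//
//     struct Wide { float lo, hi; };
//     Wide w = widen_cast<Wide>(f);         // larger dst: low bytes copied, the rest uninitialized
//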

// Our program is an array of void*, either
//   - 1 void* per stage with no context pointer, the next stage;
//   - 2 void* per stage with a context pointer, first the context pointer, then the next stage.

// load_and_inc() steps the program forward by 1 void*, returning that pointer.
SI void* load_and_inc(void**& program) {
#if defined(__GNUC__) && defined(__x86_64__)
    // If program is in %rsi (we try to make this likely) then this is a single instruction.
    void* rax;
    asm("lodsq" : "=a"(rax), "+S"(program));  // Write-only %rax, read-write %rsi.
    return rax;
#else
    // On ARM *program++ compiles into pretty ideal code without any handholding.
    return *program++;
#endif
}
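//
// A sketch of how a pipeline might walk the program with load_and_inc().  The
// Stage function type and run() helper below are hypothetical, not the ones
// the real stages use:
//
//     using Stage = void(*)(void** program /*, ...pixel registers... */);
//
//     void run(void** program) {
//         auto start = (Stage)load_and_inc(program);   // pull the first stage pointer
//         start(program);                              // each stage then loads the next
//     }
//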

// Lazily resolved on first cast.  Does nothing if cast to Ctx::None.
struct Ctx {
    struct None {};

    void*   ptr;
    void**& program;

    explicit Ctx(void**& p) : ptr(nullptr), program(p) {}

    template <typename T>
    operator T*() {
        if (!ptr) { ptr = load_and_inc(program); }
        return (T*)ptr;
    }
    operator None() { return None{}; }
};
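//
// A sketch of how a stage might pull its context through Ctx.  The stage and
// context struct names are made up for illustration; a stage with no context
// simply converts to Ctx::None and the program is not advanced:
//
//     struct GatherCtx { const uint32_t* pixels; int stride; };
//
//     SI void my_gather_stage(void**& program) {
//         auto ctx = (const GatherCtx*)Ctx{program};   // loads the context pointer on first cast
//         (void)ctx->stride;
//     }
//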

#endif//SkJumper_misc_DEFINED