/*
 * Copyright 2021 Alyssa Rosenzweig
 * Copyright 2019 Collabora, Ltd.
 * SPDX-License-Identifier: MIT
 */

#pragma once

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <time.h>
#include "util/list.h"

struct agx_device;

enum agx_alloc_type {
   AGX_ALLOC_REGULAR = 0,
   AGX_ALLOC_MEMMAP = 1,
   AGX_ALLOC_CMDBUF = 2,
   AGX_NUM_ALLOC,
};

enum agx_bo_flags {
   /* BO is shared across processes (imported or exported) and therefore cannot
    * be cached locally
    */
   AGX_BO_SHARED = 1 << 0,

   /* BO must be allocated in the low 32 bits of VA space */
   AGX_BO_LOW_VA = 1 << 1,

   /* BO is executable */
   AGX_BO_EXEC = 1 << 2,

   /* BO should be mapped write-back on the CPU (else, write-combine) */
   AGX_BO_WRITEBACK = 1 << 3,

   /* BO could potentially be shared (imported or exported) and therefore cannot
    * be allocated as private
    */
   AGX_BO_SHAREABLE = 1 << 4,

   /* BO is read-only from the GPU side */
   AGX_BO_READONLY = 1 << 5,
};
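
/*
 * Illustrative sketch, not part of the driver API: the flags above are
 * bitwise OR'd together at allocation time. For instance, a BO holding
 * GPU-executable code that must sit in the low 32 bits of VA space might
 * combine the two flags below. The macro name is hypothetical.
 */
#define AGX_BO_EXAMPLE_SHADER_FLAGS (AGX_BO_EXEC | AGX_BO_LOW_VA)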

struct agx_ptr {
   /* If CPU mapped, CPU address. NULL if not mapped */
   void *cpu;

   /* If type REGULAR, mapped GPU address */
   uint64_t gpu;
};

struct agx_bo {
   /* Must be first for casting */
   struct list_head bucket_link;

   /* Used to link the BO to the BO cache LRU list. */
   struct list_head lru_link;

   /* The time this BO was used last, so we can evict stale BOs. */
   time_t last_used;

   enum agx_alloc_type type;

   /* Creation attributes */
   enum agx_bo_flags flags;
   size_t size;
   size_t align;

   /* Mapping */
   struct agx_ptr ptr;

   /* Index unique only up to type, process-local */
   uint32_t handle;

   /* DMA-BUF fd clone for adding fences to imports/exports */
   int prime_fd;

   /* Syncobj handle of the current writer, if any */
   uint32_t writer_syncobj;

   /* Globally unique value (system wide) for tracing. Exists for resources,
    * command buffers, GPU submissions, segments, segment lists, encoders,
    * accelerators, and channels. Corresponds to Instruments' magic table
    * metal-gpu-submission-to-command-buffer-id */
   uint64_t guid;

   /* Human-readable label, or NULL if none */
   char *name;

   /* Owner */
   struct agx_device *dev;

   /* Update atomically */
   int32_t refcnt;

   /* Used while decoding, marked read-only */
   bool ro;

   /* Used while decoding, mapped */
   bool mapped;

   /* For debugging */
   const char *label;
};

struct agx_bo *agx_bo_create_aligned(struct agx_device *dev, unsigned size,
                                     unsigned align, enum agx_bo_flags flags,
                                     const char *label);
static inline struct agx_bo *
agx_bo_create(struct agx_device *dev, unsigned size, enum agx_bo_flags flags,
              const char *label)
{
   return agx_bo_create_aligned(dev, size, 0, flags, label);
}

void agx_bo_reference(struct agx_bo *bo);
void agx_bo_unreference(struct agx_bo *bo);
struct agx_bo *agx_bo_import(struct agx_device *dev, int fd);
int agx_bo_export(struct agx_bo *bo);
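
/*
 * Illustrative sketch, not part of the driver API: the typical lifetime of a
 * BO that the CPU fills and the GPU only reads. The helper name, the 16 KiB
 * size, and the flag choice are hypothetical placeholders.
 */
static inline void
agx_bo_example_lifecycle(struct agx_device *dev)
{
   /* Write-back CPU mapping for efficient CPU writes; read-only to the GPU */
   struct agx_bo *bo = agx_bo_create(
      dev, 16384, AGX_BO_WRITEBACK | AGX_BO_READONLY, "example buffer");
   if (!bo)
      return;

   /* ptr.cpu is the CPU mapping (if any); ptr.gpu the mapped GPU address
    * (see struct agx_ptr) */
   uint32_t *map = (uint32_t *)bo->ptr.cpu;
   if (map)
      map[0] = 0;

   /* Drop our reference once nothing else needs the BO. agx_bo_reference()
    * takes an extra reference if the BO must outlive this scope. */
   agx_bo_unreference(bo);
}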

void agx_bo_free(struct agx_device *dev, struct agx_bo *bo);
struct agx_bo *agx_bo_alloc(struct agx_device *dev, size_t size, size_t align,
                            enum agx_bo_flags flags);
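
/* BO cache (an assumption based on the lru_link/last_used fields above):
 * freed BOs are presumably kept on a per-device LRU so that a later
 * allocation with a compatible size, alignment, and flags can be recycled
 * instead of asking the kernel again. BOs marked AGX_BO_SHARED cannot be
 * cached. agx_bo_cache_fetch() pulls a matching BO from that cache, and
 * agx_bo_cache_evict_all() frees everything the cache still holds. */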
struct agx_bo *agx_bo_cache_fetch(struct agx_device *dev, size_t size,
                                  size_t align, uint32_t flags,
                                  const bool dontwait);
void agx_bo_cache_evict_all(struct agx_device *dev);
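
/*
 * Illustrative sketch, not part of the driver API: handing a BO to another
 * process. It is assumed here that agx_bo_export() returns a dma-buf file
 * descriptor (negative on failure) and that BOs meant for sharing were
 * created with AGX_BO_SHAREABLE. The helper name is hypothetical.
 */
static inline int
agx_bo_example_share(struct agx_bo *bo)
{
   /* The returned fd can be passed over IPC; the receiving process wraps it
    * with agx_bo_import(dev, fd). Once a BO has been imported or exported it
    * counts as AGX_BO_SHARED and is no longer eligible for the local BO
    * cache. */
   return agx_bo_export(bo);
}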