Commit | Line | Data |
---|---|---|
771fe6b9 JG |
1 | /* |
2 | * Copyright 2008 Advanced Micro Devices, Inc. | |
3 | * Copyright 2008 Red Hat Inc. | |
4 | * Copyright 2009 Jerome Glisse. | |
5 | * | |
6 | * Permission is hereby granted, free of charge, to any person obtaining a | |
7 | * copy of this software and associated documentation files (the "Software"), | |
8 | * to deal in the Software without restriction, including without limitation | |
9 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, | |
10 | * and/or sell copies of the Software, and to permit persons to whom the | |
11 | * Software is furnished to do so, subject to the following conditions: | |
12 | * | |
13 | * The above copyright notice and this permission notice shall be included in | |
14 | * all copies or substantial portions of the Software. | |
15 | * | |
16 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
17 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
18 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL | |
19 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR | |
20 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, | |
21 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | |
22 | * OTHER DEALINGS IN THE SOFTWARE. | |
23 | * | |
24 | * Authors: Dave Airlie | |
25 | * Alex Deucher | |
26 | * Jerome Glisse | |
27 | */ | |
28 | #ifndef __RADEON_ASIC_H__ | |
29 | #define __RADEON_ASIC_H__ | |
30 | ||
31 | /* | |
32 | * common functions | |
33 | */ | |
/*
 * Common clock-control entry points shared by the ASIC tables below.
 * "legacy" variants are used by pre-AtomBIOS tables; "atom" variants
 * are used by AtomBIOS-based tables (presumably via BIOS command
 * tables — confirm against radeon_clocks.c / radeon_atombios.c).
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
44 | ||
45 | /* | |
44ca7478 | 46 | * r100,rv100,rs100,rv200,rs200 |
771fe6b9 | 47 | */ |
d4550907 JG |
48 | extern int r100_init(struct radeon_device *rdev); |
49 | extern void r100_fini(struct radeon_device *rdev); | |
50 | extern int r100_suspend(struct radeon_device *rdev); | |
51 | extern int r100_resume(struct radeon_device *rdev); | |
771fe6b9 JG |
52 | uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg); |
53 | void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); | |
28d52043 | 54 | void r100_vga_set_state(struct radeon_device *rdev, bool state); |
771fe6b9 | 55 | int r100_gpu_reset(struct radeon_device *rdev); |
7ed220d7 | 56 | u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc); |
771fe6b9 JG |
57 | void r100_pci_gart_tlb_flush(struct radeon_device *rdev); |
58 | int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); | |
3ce0a23d | 59 | void r100_cp_commit(struct radeon_device *rdev); |
771fe6b9 JG |
60 | void r100_ring_start(struct radeon_device *rdev); |
61 | int r100_irq_set(struct radeon_device *rdev); | |
62 | int r100_irq_process(struct radeon_device *rdev); | |
63 | void r100_fence_ring_emit(struct radeon_device *rdev, | |
64 | struct radeon_fence *fence); | |
65 | int r100_cs_parse(struct radeon_cs_parser *p); | |
66 | void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); | |
67 | uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg); | |
68 | int r100_copy_blit(struct radeon_device *rdev, | |
69 | uint64_t src_offset, | |
70 | uint64_t dst_offset, | |
71 | unsigned num_pages, | |
72 | struct radeon_fence *fence); | |
e024e110 DA |
73 | int r100_set_surface_reg(struct radeon_device *rdev, int reg, |
74 | uint32_t tiling_flags, uint32_t pitch, | |
75 | uint32_t offset, uint32_t obj_size); | |
76 | int r100_clear_surface_reg(struct radeon_device *rdev, int reg); | |
c93bb85b | 77 | void r100_bandwidth_update(struct radeon_device *rdev); |
3ce0a23d | 78 | void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib); |
3ce0a23d | 79 | int r100_ring_test(struct radeon_device *rdev); |
429770b3 AD |
80 | void r100_hpd_init(struct radeon_device *rdev); |
81 | void r100_hpd_fini(struct radeon_device *rdev); | |
82 | bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); | |
83 | void r100_hpd_set_polarity(struct radeon_device *rdev, | |
84 | enum radeon_hpd_id hpd); | |
771fe6b9 JG |
85 | |
86 | static struct radeon_asic r100_asic = { | |
068a117c | 87 | .init = &r100_init, |
d4550907 JG |
88 | .fini = &r100_fini, |
89 | .suspend = &r100_suspend, | |
90 | .resume = &r100_resume, | |
28d52043 | 91 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 92 | .gpu_reset = &r100_gpu_reset, |
771fe6b9 JG |
93 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
94 | .gart_set_page = &r100_pci_gart_set_page, | |
3ce0a23d | 95 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 96 | .ring_start = &r100_ring_start, |
3ce0a23d JG |
97 | .ring_test = &r100_ring_test, |
98 | .ring_ib_execute = &r100_ring_ib_execute, | |
771fe6b9 JG |
99 | .irq_set = &r100_irq_set, |
100 | .irq_process = &r100_irq_process, | |
7ed220d7 | 101 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
102 | .fence_ring_emit = &r100_fence_ring_emit, |
103 | .cs_parse = &r100_cs_parse, | |
104 | .copy_blit = &r100_copy_blit, | |
105 | .copy_dma = NULL, | |
106 | .copy = &r100_copy_blit, | |
7433874e | 107 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
771fe6b9 | 108 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
5ea597f3 | 109 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
771fe6b9 | 110 | .set_memory_clock = NULL, |
c836a412 | 111 | .get_pcie_lanes = NULL, |
771fe6b9 JG |
112 | .set_pcie_lanes = NULL, |
113 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
114 | .set_surface_reg = r100_set_surface_reg, |
115 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 116 | .bandwidth_update = &r100_bandwidth_update, |
429770b3 AD |
117 | .hpd_init = &r100_hpd_init, |
118 | .hpd_fini = &r100_hpd_fini, | |
119 | .hpd_sense = &r100_hpd_sense, | |
120 | .hpd_set_polarity = &r100_hpd_set_polarity, | |
062b389c | 121 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
122 | }; |
123 | ||
44ca7478 PN |
124 | /* |
125 | * r200,rv250,rs300,rv280 | |
126 | */ | |
/* r200 DMA copy engine; also reused by most later pre-r600 tables. */
extern int r200_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
132 | static struct radeon_asic r200_asic = { | |
133 | .init = &r100_init, | |
134 | .fini = &r100_fini, | |
135 | .suspend = &r100_suspend, | |
136 | .resume = &r100_resume, | |
137 | .vga_set_state = &r100_vga_set_state, | |
138 | .gpu_reset = &r100_gpu_reset, | |
139 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, | |
140 | .gart_set_page = &r100_pci_gart_set_page, | |
141 | .cp_commit = &r100_cp_commit, | |
142 | .ring_start = &r100_ring_start, | |
143 | .ring_test = &r100_ring_test, | |
144 | .ring_ib_execute = &r100_ring_ib_execute, | |
145 | .irq_set = &r100_irq_set, | |
146 | .irq_process = &r100_irq_process, | |
147 | .get_vblank_counter = &r100_get_vblank_counter, | |
148 | .fence_ring_emit = &r100_fence_ring_emit, | |
149 | .cs_parse = &r100_cs_parse, | |
150 | .copy_blit = &r100_copy_blit, | |
151 | .copy_dma = &r200_copy_dma, | |
152 | .copy = &r100_copy_blit, | |
153 | .get_engine_clock = &radeon_legacy_get_engine_clock, | |
154 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
155 | .get_memory_clock = &radeon_legacy_get_memory_clock, | |
156 | .set_memory_clock = NULL, | |
157 | .set_pcie_lanes = NULL, | |
158 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
159 | .set_surface_reg = r100_set_surface_reg, | |
160 | .clear_surface_reg = r100_clear_surface_reg, | |
161 | .bandwidth_update = &r100_bandwidth_update, | |
162 | .hpd_init = &r100_hpd_init, | |
163 | .hpd_fini = &r100_hpd_fini, | |
164 | .hpd_sense = &r100_hpd_sense, | |
165 | .hpd_set_polarity = &r100_hpd_set_polarity, | |
166 | .ioctl_wait_idle = NULL, | |
167 | }; | |
168 | ||
771fe6b9 JG |
169 | |
170 | /* | |
171 | * r300,r350,rv350,rv380 | |
172 | */ | |
207bf9e9 JG |
/* r300-family core hooks plus the rv370 PCIe GART / lane helpers. */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int rv370_get_pcie_lanes(struct radeon_device *rdev);
44ca7478 | 188 | |
771fe6b9 | 189 | static struct radeon_asic r300_asic = { |
068a117c | 190 | .init = &r300_init, |
207bf9e9 JG |
191 | .fini = &r300_fini, |
192 | .suspend = &r300_suspend, | |
193 | .resume = &r300_resume, | |
28d52043 | 194 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 195 | .gpu_reset = &r300_gpu_reset, |
771fe6b9 JG |
196 | .gart_tlb_flush = &r100_pci_gart_tlb_flush, |
197 | .gart_set_page = &r100_pci_gart_set_page, | |
3ce0a23d | 198 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 199 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
200 | .ring_test = &r100_ring_test, |
201 | .ring_ib_execute = &r100_ring_ib_execute, | |
771fe6b9 JG |
202 | .irq_set = &r100_irq_set, |
203 | .irq_process = &r100_irq_process, | |
7ed220d7 | 204 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
205 | .fence_ring_emit = &r300_fence_ring_emit, |
206 | .cs_parse = &r300_cs_parse, | |
207 | .copy_blit = &r100_copy_blit, | |
44ca7478 | 208 | .copy_dma = &r200_copy_dma, |
771fe6b9 | 209 | .copy = &r100_copy_blit, |
7433874e | 210 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
771fe6b9 | 211 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
5ea597f3 | 212 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
771fe6b9 | 213 | .set_memory_clock = NULL, |
c836a412 | 214 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
771fe6b9 JG |
215 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
216 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
217 | .set_surface_reg = r100_set_surface_reg, |
218 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 219 | .bandwidth_update = &r100_bandwidth_update, |
d80eeb0f PN |
220 | .hpd_init = &r100_hpd_init, |
221 | .hpd_fini = &r100_hpd_fini, | |
222 | .hpd_sense = &r100_hpd_sense, | |
223 | .hpd_set_polarity = &r100_hpd_set_polarity, | |
224 | .ioctl_wait_idle = NULL, | |
225 | }; | |
226 | ||
227 | ||
228 | static struct radeon_asic r300_asic_pcie = { | |
229 | .init = &r300_init, | |
230 | .fini = &r300_fini, | |
231 | .suspend = &r300_suspend, | |
232 | .resume = &r300_resume, | |
233 | .vga_set_state = &r100_vga_set_state, | |
234 | .gpu_reset = &r300_gpu_reset, | |
235 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, | |
236 | .gart_set_page = &rv370_pcie_gart_set_page, | |
237 | .cp_commit = &r100_cp_commit, | |
238 | .ring_start = &r300_ring_start, | |
239 | .ring_test = &r100_ring_test, | |
240 | .ring_ib_execute = &r100_ring_ib_execute, | |
241 | .irq_set = &r100_irq_set, | |
242 | .irq_process = &r100_irq_process, | |
243 | .get_vblank_counter = &r100_get_vblank_counter, | |
244 | .fence_ring_emit = &r300_fence_ring_emit, | |
245 | .cs_parse = &r300_cs_parse, | |
246 | .copy_blit = &r100_copy_blit, | |
247 | .copy_dma = &r200_copy_dma, | |
248 | .copy = &r100_copy_blit, | |
249 | .get_engine_clock = &radeon_legacy_get_engine_clock, | |
250 | .set_engine_clock = &radeon_legacy_set_engine_clock, | |
251 | .get_memory_clock = &radeon_legacy_get_memory_clock, | |
252 | .set_memory_clock = NULL, | |
253 | .set_pcie_lanes = &rv370_set_pcie_lanes, | |
254 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
255 | .set_surface_reg = r100_set_surface_reg, | |
256 | .clear_surface_reg = r100_clear_surface_reg, | |
257 | .bandwidth_update = &r100_bandwidth_update, | |
429770b3 AD |
258 | .hpd_init = &r100_hpd_init, |
259 | .hpd_fini = &r100_hpd_fini, | |
260 | .hpd_sense = &r100_hpd_sense, | |
261 | .hpd_set_polarity = &r100_hpd_set_polarity, | |
062b389c | 262 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
263 | }; |
264 | ||
265 | /* | |
266 | * r420,r423,rv410 | |
267 | */ | |
9f022ddf JG |
/* r420-family lifecycle hooks; everything else is inherited from r100/r300. */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
771fe6b9 | 272 | static struct radeon_asic r420_asic = { |
9f022ddf JG |
273 | .init = &r420_init, |
274 | .fini = &r420_fini, | |
275 | .suspend = &r420_suspend, | |
276 | .resume = &r420_resume, | |
28d52043 | 277 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 278 | .gpu_reset = &r300_gpu_reset, |
771fe6b9 JG |
279 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
280 | .gart_set_page = &rv370_pcie_gart_set_page, | |
3ce0a23d | 281 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 282 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
283 | .ring_test = &r100_ring_test, |
284 | .ring_ib_execute = &r100_ring_ib_execute, | |
771fe6b9 JG |
285 | .irq_set = &r100_irq_set, |
286 | .irq_process = &r100_irq_process, | |
7ed220d7 | 287 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
288 | .fence_ring_emit = &r300_fence_ring_emit, |
289 | .cs_parse = &r300_cs_parse, | |
290 | .copy_blit = &r100_copy_blit, | |
44ca7478 | 291 | .copy_dma = &r200_copy_dma, |
771fe6b9 | 292 | .copy = &r100_copy_blit, |
7433874e | 293 | .get_engine_clock = &radeon_atom_get_engine_clock, |
771fe6b9 | 294 | .set_engine_clock = &radeon_atom_set_engine_clock, |
7433874e | 295 | .get_memory_clock = &radeon_atom_get_memory_clock, |
771fe6b9 | 296 | .set_memory_clock = &radeon_atom_set_memory_clock, |
c836a412 | 297 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
771fe6b9 JG |
298 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
299 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
300 | .set_surface_reg = r100_set_surface_reg, |
301 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 302 | .bandwidth_update = &r100_bandwidth_update, |
429770b3 AD |
303 | .hpd_init = &r100_hpd_init, |
304 | .hpd_fini = &r100_hpd_fini, | |
305 | .hpd_sense = &r100_hpd_sense, | |
306 | .hpd_set_polarity = &r100_hpd_set_polarity, | |
062b389c | 307 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
308 | }; |
309 | ||
310 | ||
311 | /* | |
312 | * rs400,rs480 | |
313 | */ | |
ca6ffc64 JG |
/* rs400/rs480 IGP hooks: own GART and MC register accessors. */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
322 | static struct radeon_asic rs400_asic = { | |
ca6ffc64 JG |
323 | .init = &rs400_init, |
324 | .fini = &rs400_fini, | |
325 | .suspend = &rs400_suspend, | |
326 | .resume = &rs400_resume, | |
28d52043 | 327 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 328 | .gpu_reset = &r300_gpu_reset, |
771fe6b9 JG |
329 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
330 | .gart_set_page = &rs400_gart_set_page, | |
3ce0a23d | 331 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 332 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
333 | .ring_test = &r100_ring_test, |
334 | .ring_ib_execute = &r100_ring_ib_execute, | |
771fe6b9 JG |
335 | .irq_set = &r100_irq_set, |
336 | .irq_process = &r100_irq_process, | |
7ed220d7 | 337 | .get_vblank_counter = &r100_get_vblank_counter, |
771fe6b9 JG |
338 | .fence_ring_emit = &r300_fence_ring_emit, |
339 | .cs_parse = &r300_cs_parse, | |
340 | .copy_blit = &r100_copy_blit, | |
44ca7478 | 341 | .copy_dma = &r200_copy_dma, |
771fe6b9 | 342 | .copy = &r100_copy_blit, |
7433874e | 343 | .get_engine_clock = &radeon_legacy_get_engine_clock, |
771fe6b9 | 344 | .set_engine_clock = &radeon_legacy_set_engine_clock, |
5ea597f3 | 345 | .get_memory_clock = &radeon_legacy_get_memory_clock, |
771fe6b9 | 346 | .set_memory_clock = NULL, |
c836a412 | 347 | .get_pcie_lanes = NULL, |
771fe6b9 JG |
348 | .set_pcie_lanes = NULL, |
349 | .set_clock_gating = &radeon_legacy_set_clock_gating, | |
e024e110 DA |
350 | .set_surface_reg = r100_set_surface_reg, |
351 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 352 | .bandwidth_update = &r100_bandwidth_update, |
429770b3 AD |
353 | .hpd_init = &r100_hpd_init, |
354 | .hpd_fini = &r100_hpd_fini, | |
355 | .hpd_sense = &r100_hpd_sense, | |
356 | .hpd_set_polarity = &r100_hpd_set_polarity, | |
062b389c | 357 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
358 | }; |
359 | ||
360 | ||
361 | /* | |
362 | * rs600. | |
363 | */ | |
c010f800 JG |
364 | extern int rs600_init(struct radeon_device *rdev); |
365 | extern void rs600_fini(struct radeon_device *rdev); | |
366 | extern int rs600_suspend(struct radeon_device *rdev); | |
367 | extern int rs600_resume(struct radeon_device *rdev); | |
771fe6b9 | 368 | int rs600_irq_set(struct radeon_device *rdev); |
7ed220d7 MD |
369 | int rs600_irq_process(struct radeon_device *rdev); |
370 | u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc); | |
771fe6b9 JG |
371 | void rs600_gart_tlb_flush(struct radeon_device *rdev); |
372 | int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr); | |
373 | uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg); | |
374 | void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v); | |
c93bb85b | 375 | void rs600_bandwidth_update(struct radeon_device *rdev); |
429770b3 AD |
376 | void rs600_hpd_init(struct radeon_device *rdev); |
377 | void rs600_hpd_fini(struct radeon_device *rdev); | |
378 | bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd); | |
379 | void rs600_hpd_set_polarity(struct radeon_device *rdev, | |
380 | enum radeon_hpd_id hpd); | |
381 | ||
771fe6b9 | 382 | static struct radeon_asic rs600_asic = { |
3f7dc91a | 383 | .init = &rs600_init, |
c010f800 JG |
384 | .fini = &rs600_fini, |
385 | .suspend = &rs600_suspend, | |
386 | .resume = &rs600_resume, | |
28d52043 | 387 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 388 | .gpu_reset = &r300_gpu_reset, |
771fe6b9 JG |
389 | .gart_tlb_flush = &rs600_gart_tlb_flush, |
390 | .gart_set_page = &rs600_gart_set_page, | |
3ce0a23d | 391 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 392 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
393 | .ring_test = &r100_ring_test, |
394 | .ring_ib_execute = &r100_ring_ib_execute, | |
771fe6b9 | 395 | .irq_set = &rs600_irq_set, |
7ed220d7 MD |
396 | .irq_process = &rs600_irq_process, |
397 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 JG |
398 | .fence_ring_emit = &r300_fence_ring_emit, |
399 | .cs_parse = &r300_cs_parse, | |
400 | .copy_blit = &r100_copy_blit, | |
44ca7478 | 401 | .copy_dma = &r200_copy_dma, |
771fe6b9 | 402 | .copy = &r100_copy_blit, |
7433874e | 403 | .get_engine_clock = &radeon_atom_get_engine_clock, |
771fe6b9 | 404 | .set_engine_clock = &radeon_atom_set_engine_clock, |
7433874e | 405 | .get_memory_clock = &radeon_atom_get_memory_clock, |
771fe6b9 | 406 | .set_memory_clock = &radeon_atom_set_memory_clock, |
c836a412 | 407 | .get_pcie_lanes = NULL, |
771fe6b9 JG |
408 | .set_pcie_lanes = NULL, |
409 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
32b3c2ab JG |
410 | .set_surface_reg = r100_set_surface_reg, |
411 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 412 | .bandwidth_update = &rs600_bandwidth_update, |
429770b3 AD |
413 | .hpd_init = &rs600_hpd_init, |
414 | .hpd_fini = &rs600_hpd_fini, | |
415 | .hpd_sense = &rs600_hpd_sense, | |
416 | .hpd_set_polarity = &rs600_hpd_set_polarity, | |
062b389c | 417 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
418 | }; |
419 | ||
420 | ||
421 | /* | |
422 | * rs690,rs740 | |
423 | */ | |
3bc68535 JG |
/* rs690/rs740 hooks; GART and IRQ come from rs400/rs600 respectively. */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
771fe6b9 | 431 | static struct radeon_asic rs690_asic = { |
3bc68535 JG |
432 | .init = &rs690_init, |
433 | .fini = &rs690_fini, | |
434 | .suspend = &rs690_suspend, | |
435 | .resume = &rs690_resume, | |
28d52043 | 436 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 437 | .gpu_reset = &r300_gpu_reset, |
771fe6b9 JG |
438 | .gart_tlb_flush = &rs400_gart_tlb_flush, |
439 | .gart_set_page = &rs400_gart_set_page, | |
3ce0a23d | 440 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 441 | .ring_start = &r300_ring_start, |
3ce0a23d JG |
442 | .ring_test = &r100_ring_test, |
443 | .ring_ib_execute = &r100_ring_ib_execute, | |
771fe6b9 | 444 | .irq_set = &rs600_irq_set, |
7ed220d7 MD |
445 | .irq_process = &rs600_irq_process, |
446 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 JG |
447 | .fence_ring_emit = &r300_fence_ring_emit, |
448 | .cs_parse = &r300_cs_parse, | |
449 | .copy_blit = &r100_copy_blit, | |
44ca7478 PN |
450 | .copy_dma = &r200_copy_dma, |
451 | .copy = &r200_copy_dma, | |
7433874e | 452 | .get_engine_clock = &radeon_atom_get_engine_clock, |
771fe6b9 | 453 | .set_engine_clock = &radeon_atom_set_engine_clock, |
7433874e | 454 | .get_memory_clock = &radeon_atom_get_memory_clock, |
771fe6b9 | 455 | .set_memory_clock = &radeon_atom_set_memory_clock, |
c836a412 | 456 | .get_pcie_lanes = NULL, |
771fe6b9 JG |
457 | .set_pcie_lanes = NULL, |
458 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
459 | .set_surface_reg = r100_set_surface_reg, |
460 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 461 | .bandwidth_update = &rs690_bandwidth_update, |
429770b3 AD |
462 | .hpd_init = &rs600_hpd_init, |
463 | .hpd_fini = &rs600_hpd_fini, | |
464 | .hpd_sense = &rs600_hpd_sense, | |
465 | .hpd_set_polarity = &rs600_hpd_set_polarity, | |
062b389c | 466 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
467 | }; |
468 | ||
469 | ||
470 | /* | |
471 | * rv515 | |
472 | */ | |
/* rv515 hooks: own MC/PCIe register accessors, ring start and bandwidth. */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
771fe6b9 | 484 | static struct radeon_asic rv515_asic = { |
068a117c | 485 | .init = &rv515_init, |
d39c3b89 JG |
486 | .fini = &rv515_fini, |
487 | .suspend = &rv515_suspend, | |
488 | .resume = &rv515_resume, | |
28d52043 | 489 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 490 | .gpu_reset = &rv515_gpu_reset, |
771fe6b9 JG |
491 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
492 | .gart_set_page = &rv370_pcie_gart_set_page, | |
3ce0a23d | 493 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 494 | .ring_start = &rv515_ring_start, |
3ce0a23d JG |
495 | .ring_test = &r100_ring_test, |
496 | .ring_ib_execute = &r100_ring_ib_execute, | |
7ed220d7 MD |
497 | .irq_set = &rs600_irq_set, |
498 | .irq_process = &rs600_irq_process, | |
499 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 | 500 | .fence_ring_emit = &r300_fence_ring_emit, |
068a117c | 501 | .cs_parse = &r300_cs_parse, |
771fe6b9 | 502 | .copy_blit = &r100_copy_blit, |
44ca7478 | 503 | .copy_dma = &r200_copy_dma, |
771fe6b9 | 504 | .copy = &r100_copy_blit, |
7433874e | 505 | .get_engine_clock = &radeon_atom_get_engine_clock, |
771fe6b9 | 506 | .set_engine_clock = &radeon_atom_set_engine_clock, |
7433874e | 507 | .get_memory_clock = &radeon_atom_get_memory_clock, |
771fe6b9 | 508 | .set_memory_clock = &radeon_atom_set_memory_clock, |
c836a412 | 509 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
771fe6b9 JG |
510 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
511 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
512 | .set_surface_reg = r100_set_surface_reg, |
513 | .clear_surface_reg = r100_clear_surface_reg, | |
c93bb85b | 514 | .bandwidth_update = &rv515_bandwidth_update, |
429770b3 AD |
515 | .hpd_init = &rs600_hpd_init, |
516 | .hpd_fini = &rs600_hpd_fini, | |
517 | .hpd_sense = &rs600_hpd_sense, | |
518 | .hpd_set_polarity = &rs600_hpd_set_polarity, | |
062b389c | 519 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
520 | }; |
521 | ||
522 | ||
523 | /* | |
524 | * r520,rv530,rv560,rv570,r580 | |
525 | */ | |
/* r520-family only overrides init/resume; the rest is shared with rv515. */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
771fe6b9 | 528 | static struct radeon_asic r520_asic = { |
d39c3b89 | 529 | .init = &r520_init, |
f0ed1f65 JG |
530 | .fini = &rv515_fini, |
531 | .suspend = &rv515_suspend, | |
532 | .resume = &r520_resume, | |
28d52043 | 533 | .vga_set_state = &r100_vga_set_state, |
771fe6b9 | 534 | .gpu_reset = &rv515_gpu_reset, |
771fe6b9 JG |
535 | .gart_tlb_flush = &rv370_pcie_gart_tlb_flush, |
536 | .gart_set_page = &rv370_pcie_gart_set_page, | |
3ce0a23d | 537 | .cp_commit = &r100_cp_commit, |
771fe6b9 | 538 | .ring_start = &rv515_ring_start, |
3ce0a23d JG |
539 | .ring_test = &r100_ring_test, |
540 | .ring_ib_execute = &r100_ring_ib_execute, | |
7ed220d7 MD |
541 | .irq_set = &rs600_irq_set, |
542 | .irq_process = &rs600_irq_process, | |
543 | .get_vblank_counter = &rs600_get_vblank_counter, | |
771fe6b9 | 544 | .fence_ring_emit = &r300_fence_ring_emit, |
068a117c | 545 | .cs_parse = &r300_cs_parse, |
771fe6b9 | 546 | .copy_blit = &r100_copy_blit, |
44ca7478 | 547 | .copy_dma = &r200_copy_dma, |
771fe6b9 | 548 | .copy = &r100_copy_blit, |
7433874e | 549 | .get_engine_clock = &radeon_atom_get_engine_clock, |
771fe6b9 | 550 | .set_engine_clock = &radeon_atom_set_engine_clock, |
7433874e | 551 | .get_memory_clock = &radeon_atom_get_memory_clock, |
771fe6b9 | 552 | .set_memory_clock = &radeon_atom_set_memory_clock, |
c836a412 | 553 | .get_pcie_lanes = &rv370_get_pcie_lanes, |
771fe6b9 JG |
554 | .set_pcie_lanes = &rv370_set_pcie_lanes, |
555 | .set_clock_gating = &radeon_atom_set_clock_gating, | |
e024e110 DA |
556 | .set_surface_reg = r100_set_surface_reg, |
557 | .clear_surface_reg = r100_clear_surface_reg, | |
f0ed1f65 | 558 | .bandwidth_update = &rv515_bandwidth_update, |
429770b3 AD |
559 | .hpd_init = &rs600_hpd_init, |
560 | .hpd_fini = &rs600_hpd_fini, | |
561 | .hpd_sense = &rs600_hpd_sense, | |
562 | .hpd_set_polarity = &rs600_hpd_set_polarity, | |
062b389c | 563 | .ioctl_wait_idle = NULL, |
771fe6b9 JG |
564 | }; |
565 | ||
566 | /* | |
3ce0a23d | 567 | * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880 |
771fe6b9 | 568 | */ |
3ce0a23d JG |
/* r600-family hardware entry points (also used by the rv770 table). */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
extern void r600_ioctl_wait_idle(struct radeon_device *rdev, struct radeon_bo *bo);
3ce0a23d JG |
606 | |
/*
 * ASIC dispatch table for R600-family GPUs.  Callbacks from earlier chip
 * generations are reused where the hardware block is shared (RS600 GART
 * page encoding and vblank counter, RV370 PCIe lane query, RV515
 * bandwidth code); everything else points at the r600 implementations.
 */
static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,	/* GART page encoding shared with rs600 */
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,	/* DMA copy path also routed through the blitter */
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = NULL,
	/* NULL here while rv770 hooks up the atom clock-gating call —
	 * presumably disabled deliberately on r600; confirm before changing. */
	.set_clock_gating = NULL,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};
643 | ||
/*
 * rv770,rv730,rv710,rv740
 *
 * R700-family chips: only init/teardown and reset differ from r600;
 * the rest of the dispatch table below reuses the r600 callbacks.
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);
/*
 * ASIC dispatch table for RV770-family (R700) GPUs.  Identical to
 * r600_asic except for the chip-specific init/fini/suspend/resume and
 * GPU reset entries, and set_clock_gating, which is wired to the
 * ATOM BIOS helper here.
 */
static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,	/* GART page encoding shared with rs600 */
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,	/* DMA copy path also routed through the blitter */
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.get_pcie_lanes = &rv370_get_pcie_lanes,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,	/* unlike r600, gating enabled via ATOM */
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
	.ioctl_wait_idle = r600_ioctl_wait_idle,
};
771fe6b9 | 689 | |
/*
 * evergreen
 *
 * Evergreen-family entry points: chip init/teardown, reset, display
 * bandwidth management and hotplug detect.
 */
int evergreen_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
int evergreen_suspend(struct radeon_device *rdev);
int evergreen_resume(struct radeon_device *rdev);
int evergreen_gpu_reset(struct radeon_device *rdev);
void evergreen_bandwidth_update(struct radeon_device *rdev);
void evergreen_hpd_init(struct radeon_device *rdev);
void evergreen_hpd_fini(struct radeon_device *rdev);
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd);
/*
 * ASIC dispatch table for Evergreen-family GPUs.  The command-processor,
 * ring, IRQ, fence, CS-parse and copy callbacks are all NULL here —
 * presumably acceleration support was not yet implemented for this
 * family when the table was written; callers must tolerate the NULLs
 * (verify before relying on any of these paths).  Note this table also
 * omits .get_pcie_lanes and .ioctl_wait_idle, which therefore
 * zero-initialize to NULL.
 */
static struct radeon_asic evergreen_asic = {
	.init = &evergreen_init,
	.fini = &evergreen_fini,
	.suspend = &evergreen_suspend,
	.resume = &evergreen_resume,
	.cp_commit = NULL,
	.gpu_reset = &evergreen_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,	/* GART page encoding shared with rs600 */
	.ring_test = NULL,
	.ring_ib_execute = NULL,
	.irq_set = NULL,
	.irq_process = NULL,
	.get_vblank_counter = NULL,
	.fence_ring_emit = NULL,
	.cs_parse = NULL,
	.copy_blit = NULL,
	.copy_dma = NULL,
	.copy = NULL,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = NULL,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &evergreen_bandwidth_update,
	.hpd_init = &evergreen_hpd_init,
	.hpd_fini = &evergreen_hpd_fini,
	.hpd_sense = &evergreen_hpd_sense,
	.hpd_set_polarity = &evergreen_hpd_set_polarity,
};
739 | ||
771fe6b9 | 740 | #endif |