Lines matching refs:mc in drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c (Linux kernel). Hits are grouped below by the function they fall in; the number on each line is the line number within the file, and "..." marks intervening lines that did not match the search.

In amdgpu_gmc_vram_location() (mc is passed in as an argument):

198  void amdgpu_gmc_vram_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc,
     ...
203          mc->vram_start = base;
204          mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
205          if (limit && limit < mc->real_vram_size)
206                  mc->real_vram_size = limit;
     ...
208          if (mc->xgmi.num_physical_nodes == 0) {
209                  mc->fb_start = mc->vram_start;
210                  mc->fb_end = mc->vram_end;
     ...
213                  mc->mc_vram_size >> 20, mc->vram_start,
214                  mc->vram_end, mc->real_vram_size >> 20);
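
The lines above are the non-XGMI path of VRAM placement: VRAM goes at the given MC base, the usable size is clamped to an optional limit, and with no XGMI hive (num_physical_nodes == 0) the framebuffer window simply coincides with local VRAM. Below is a minimal user-space sketch of that arithmetic; the field names mirror amdgpu_gmc, but the struct, the helper, and the example sizes are simplified stand-ins, not the kernel implementation.

/* vram_location_sketch.c: toy model of the placement above */
#include <stdio.h>
#include <stdint.h>

struct gmc_model {                      /* hypothetical, mirrors a few amdgpu_gmc fields */
        uint64_t vram_start, vram_end;
        uint64_t fb_start, fb_end;
        uint64_t mc_vram_size, real_vram_size;
        unsigned int num_xgmi_nodes;    /* stands in for mc->xgmi.num_physical_nodes */
};

static void vram_location(struct gmc_model *mc, uint64_t base, uint64_t limit)
{
        mc->vram_start = base;
        mc->vram_end = mc->vram_start + mc->mc_vram_size - 1;
        if (limit && limit < mc->real_vram_size)
                mc->real_vram_size = limit;     /* clamp usable VRAM */
        if (mc->num_xgmi_nodes == 0) {          /* no hive: FB window == VRAM */
                mc->fb_start = mc->vram_start;
                mc->fb_end = mc->vram_end;
        }
}

int main(void)
{
        struct gmc_model mc = { .mc_vram_size = 8ULL << 30,     /* 8 GiB aperture */
                                .real_vram_size = 8ULL << 30 };

        vram_location(&mc, 0, 4ULL << 30);      /* pretend only 4 GiB are usable */
        printf("VRAM: %lluM 0x%016llX - 0x%016llX (%lluM used)\n",
               (unsigned long long)(mc.mc_vram_size >> 20),
               (unsigned long long)mc.vram_start,
               (unsigned long long)mc.vram_end,
               (unsigned long long)(mc.real_vram_size >> 20));
        return 0;
}

The printf mirrors the shape of the dev_info() whose arguments appear at lines 213-214: total and used size in MiB, start and end as 64-bit hex.
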
In amdgpu_gmc_sysvm_location() (mc is passed in as an argument):

233  void amdgpu_gmc_sysvm_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc)
     ...
236          u64 hive_vram_end = mc->xgmi.node_segment_size * mc->xgmi.num_physical_nodes - 1;
237          mc->vram_start = mc->xgmi.node_segment_size * mc->xgmi.physical_node_id;
238          mc->vram_end = mc->vram_start + mc->xgmi.node_segment_size - 1;
239          mc->gart_start = hive_vram_end + 1;
240          mc->gart_end = mc->gart_start + mc->gart_size - 1;
241          mc->fb_start = hive_vram_start;
242          mc->fb_end = hive_vram_end;
     ...
244                  mc->mc_vram_size >> 20, mc->vram_start,
245                  mc->vram_end, mc->real_vram_size >> 20);
     ...
247                  mc->gart_size >> 20, mc->gart_start, mc->gart_end);
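
In the XGMI case every node in a hive maps the hive's whole VRAM as one contiguous range: vram_start/vram_end is this node's own segment inside that range, fb_start/fb_end spans the entire hive, and the GART is placed immediately after the hive VRAM. hive_vram_start is defined on a line that did not match the search; the sketch below assumes it is 0. The sizes and node count are made-up examples.

/* sysvm_location_sketch.c: toy model of the XGMI hive layout above */
#include <stdio.h>
#include <stdint.h>

int main(void)
{
        uint64_t node_segment_size = 16ULL << 30;       /* 16 GiB per node (example) */
        uint64_t num_physical_nodes = 4;                /* 4-node hive (example) */
        uint64_t physical_node_id = 2;                  /* this node's slot */
        uint64_t gart_size = 512ULL << 20;              /* 512 MiB GART (example) */

        uint64_t hive_vram_start = 0;                   /* assumption, see lead-in */
        uint64_t hive_vram_end = node_segment_size * num_physical_nodes - 1;
        uint64_t vram_start = node_segment_size * physical_node_id;
        uint64_t vram_end = vram_start + node_segment_size - 1;
        uint64_t gart_start = hive_vram_end + 1;
        uint64_t gart_end = gart_start + gart_size - 1;

        printf("node VRAM: 0x%016llX - 0x%016llX\n",
               (unsigned long long)vram_start, (unsigned long long)vram_end);
        printf("hive FB:   0x%016llX - 0x%016llX\n",
               (unsigned long long)hive_vram_start,
               (unsigned long long)hive_vram_end);
        printf("GART:      0x%016llX - 0x%016llX\n",
               (unsigned long long)gart_start, (unsigned long long)gart_end);
        return 0;
}

With these numbers node 2 sees its own 16 GiB at [32 GiB, 48 GiB) inside a 64 GiB hive window, and the GART starts right at the 64 GiB mark.
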
In amdgpu_gmc_gart_location() (mc is passed in as an argument):

260  void amdgpu_gmc_gart_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc)
     ...
270          size_bf = mc->fb_start;
271          size_af = max_mc_address + 1 - ALIGN(mc->fb_end + 1, four_gb);
     ...
273          if (mc->gart_size > max(size_bf, size_af)) {
     ...
275                  mc->gart_size = max(size_bf, size_af);
     ...
278          if ((size_bf >= mc->gart_size && size_bf < size_af) ||
279              (size_af < mc->gart_size))
280                  mc->gart_start = 0;
     ...
282                  mc->gart_start = max_mc_address - mc->gart_size + 1;
     ...
284          mc->gart_start &= ~(four_gb - 1);
285          mc->gart_end = mc->gart_start + mc->gart_size - 1;
     ...
287                  mc->gart_size >> 20, mc->gart_start, mc->gart_end);
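
The matched lines in amdgpu_gmc_gart_location() measure the hole below the FB range (size_bf) and the 4 GiB aligned hole above it (size_af), shrink the requested GART size if neither hole can take it, then place the GART in the smaller hole that still fits, keeping gart_start 4 GiB aligned. The sketch below replays that decision in user space; max_mc_address stands in for a limit the driver computes on lines that did not match, and the layout numbers are invented.

/* gart_location_sketch.c: toy model of the GART placement above */
#include <stdio.h>
#include <stdint.h>

#define ALIGN(x, a)     (((x) + (a) - 1) & ~((uint64_t)(a) - 1))
#define MAX(a, b)       ((a) > (b) ? (a) : (b))

int main(void)
{
        const uint64_t four_gb = 4ULL << 30;
        uint64_t max_mc_address = (1ULL << 44) - 1;     /* example MC address limit */
        uint64_t fb_start = 8ULL << 30;                 /* FB at [8 GiB, 16 GiB) */
        uint64_t fb_end = (16ULL << 30) - 1;
        uint64_t gart_size = 1ULL << 30;                /* want a 1 GiB GART */
        uint64_t gart_start, gart_end;

        uint64_t size_bf = fb_start;                    /* hole below FB */
        uint64_t size_af = max_mc_address + 1 - ALIGN(fb_end + 1, four_gb); /* above FB */

        if (gart_size > MAX(size_bf, size_af))
                gart_size = MAX(size_bf, size_af);      /* shrink to the biggest hole */

        if ((size_bf >= gart_size && size_bf < size_af) || size_af < gart_size)
                gart_start = 0;         /* below FB: fits and is smaller, or above doesn't fit */
        else
                gart_start = max_mc_address - gart_size + 1;    /* hole above FB */

        gart_start &= ~(four_gb - 1);                   /* keep 4 GiB alignment */
        gart_end = gart_start + gart_size - 1;

        printf("GART: %lluM 0x%016llX - 0x%016llX\n",
               (unsigned long long)(gart_size >> 20),
               (unsigned long long)gart_start,
               (unsigned long long)gart_end);
        return 0;
}

With an 8 GiB hole below the FB and a much larger one above it, the 1 GiB GART fits in the smaller hole and lands at address 0.
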
In amdgpu_gmc_agp_location() (mc is passed in as an argument):

301  void amdgpu_gmc_agp_location(struct amdgpu_device *adev, struct amdgpu_gmc *mc)
     ...
308          mc->agp_start = 0xffffffffffff;
309          mc->agp_end = 0x0;
310          mc->agp_size = 0;
     ...
315          if (mc->fb_start > mc->gart_start) {
316                  size_bf = (mc->fb_start & sixteen_gb_mask) -
317                            ALIGN(mc->gart_end + 1, sixteen_gb);
318                  size_af = mc->mc_mask + 1 - ALIGN(mc->fb_end + 1, sixteen_gb);
     ...
320                  size_bf = mc->fb_start & sixteen_gb_mask;
321                  size_af = (mc->gart_start & sixteen_gb_mask) -
322                            ALIGN(mc->fb_end + 1, sixteen_gb);
     ...
326                  mc->agp_start = (mc->fb_start - size_bf) & sixteen_gb_mask;
327                  mc->agp_size = size_bf;
     ...
329                  mc->agp_start = ALIGN(mc->fb_end + 1, sixteen_gb);
330                  mc->agp_size = size_af;
     ...
333          mc->agp_end = mc->agp_start + mc->agp_size - 1;
     ...
335                  mc->agp_size >> 20, mc->agp_start, mc->agp_end);
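
amdgpu_gmc_agp_location() carves the AGP window out of whatever 16 GiB aligned space is left around the FB and GART ranges, and the assignments at lines 308-310 are the early path that disables AGP by leaving an inverted, zero-sized range. The comparison that chooses between the two candidate holes sits on a line that did not match the search; the sketch below assumes the larger hole wins, which is consistent with the two branches at lines 326-330. Addresses and sizes are invented.

/* agp_location_sketch.c: toy model of the AGP window placement above */
#include <stdio.h>
#include <stdint.h>

#define ALIGN(x, a)     (((x) + (a) - 1) & ~((uint64_t)(a) - 1))

int main(void)
{
        const uint64_t sixteen_gb = 1ULL << 34;
        const uint64_t sixteen_gb_mask = ~(sixteen_gb - 1);
        uint64_t mc_mask = (1ULL << 44) - 1;            /* example MC address mask */

        /* example layout: GART low, FB high */
        uint64_t gart_start = 0, gart_end = (1ULL << 30) - 1;
        uint64_t fb_start = 1ULL << 40;
        uint64_t fb_end = (1ULL << 40) + (32ULL << 30) - 1;

        uint64_t size_bf, size_af, agp_start, agp_size;

        if (fb_start > gart_start) {    /* holes: between GART and FB, and above FB */
                size_bf = (fb_start & sixteen_gb_mask) - ALIGN(gart_end + 1, sixteen_gb);
                size_af = mc_mask + 1 - ALIGN(fb_end + 1, sixteen_gb);
        } else {                        /* holes: below FB, and between FB and GART */
                size_bf = fb_start & sixteen_gb_mask;
                size_af = (gart_start & sixteen_gb_mask) - ALIGN(fb_end + 1, sixteen_gb);
        }

        if (size_bf > size_af) {        /* assumed comparison: take the larger hole */
                agp_start = (fb_start - size_bf) & sixteen_gb_mask;
                agp_size = size_bf;
        } else {
                agp_start = ALIGN(fb_end + 1, sixteen_gb);
                agp_size = size_af;
        }

        printf("AGP: %lluM 0x%016llX - 0x%016llX\n",
               (unsigned long long)(agp_size >> 20),
               (unsigned long long)agp_start,
               (unsigned long long)(agp_start + agp_size - 1));
        return 0;
}

Here the space above the FB is the larger hole, so the AGP window begins at the first 16 GiB boundary past fb_end.
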