armv8: caches: Add routine to set a region non-cacheable

Add a routine, mmu_set_region_dcache_behaviour(), to mark a
particular memory region as non-cacheable.

Also define a dummy mmu_set_region_dcache_behaviour() to handle
the case where the dcache is off (CONFIG_SYS_DCACHE_OFF).

Signed-off-by: Siva Durga Prasad Paladugu <sivadur@xilinx.com>
Acked-by: Michal Simek <michal.simek@xilinx.com>
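
The intended use is for a board or driver to mark a buffer shared with
non-coherent hardware as non-cacheable. A minimal, hypothetical caller is
sketched below; the address, size and the use of DCACHE_OFF as the
non-cacheable attribute are illustrative assumptions, not part of this patch.

/* Hypothetical caller, e.g. in a board or driver file. The address and
 * size are placeholders and should cover whole MMU sections, since the
 * attribute is applied per section descriptor.
 */
#include <common.h>
#include <asm/system.h>

#define EXAMPLE_DMA_BASE	0x70000000UL	/* assumed platform address */
#define EXAMPLE_DMA_SIZE	(2 << 20)	/* assumed 2 MiB region */

static void example_mark_dma_region_uncached(void)
{
	/* DCACHE_OFF is assumed to select the non-cacheable attribute */
	mmu_set_region_dcache_behaviour(EXAMPLE_DMA_BASE, EXAMPLE_DMA_SIZE,
					DCACHE_OFF);
}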
diff --git a/arch/arm/cpu/armv8/cache_v8.c b/arch/arm/cpu/armv8/cache_v8.c
index 254a629..c22f7b6 100644
--- a/arch/arm/cpu/armv8/cache_v8.c
+++ b/arch/arm/cpu/armv8/cache_v8.c
@@ -139,6 +139,39 @@
 	return (get_sctlr() & CR_C) != 0;
 }
 
+u64 *__weak arch_get_page_table(void)
+{
+	puts("No page table offset defined\n");
+
+	return NULL;
+}
+
+void mmu_set_region_dcache_behaviour(phys_addr_t start, size_t size,
+				     enum dcache_option option)
+{
+	u64 *page_table = arch_get_page_table();
+	u64 upto, end;
+
+	if (page_table == NULL)
+		return;
+
+	end = ALIGN(start + size, (1 << MMU_SECTION_SHIFT)) >>
+	      MMU_SECTION_SHIFT;
+	start = start >> MMU_SECTION_SHIFT;
+	for (upto = start; upto < end; upto++) {
+		page_table[upto] &= ~PMD_ATTRINDX_MASK;
+		page_table[upto] |= PMD_ATTRINDX(option);
+	}
+	asm volatile("dsb sy");
+	__asm_invalidate_tlb_all();
+	asm volatile("dsb sy");
+	asm volatile("isb");
+	start = start << MMU_SECTION_SHIFT;
+	end = end << MMU_SECTION_SHIFT;
+	flush_dcache_range(start, end);
+	asm volatile("dsb sy");
+}
+
 #else	/* CONFIG_SYS_DCACHE_OFF */
 
 void invalidate_dcache_all(void)
@@ -170,6 +203,11 @@
 	return 0;
 }
 
+void mmu_set_region_dcache_behaviour(phys_addr_t start, size_t size,
+				     enum dcache_option option)
+{
+}
+
 #endif	/* CONFIG_SYS_DCACHE_OFF */
 
 #ifndef CONFIG_SYS_ICACHE_OFF
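
arch_get_page_table() is declared __weak so the generic code builds
everywhere while an SoC port supplies the real location of its translation
table. A sketch of such an override is shown below; the offset from
gd->arch.tlb_addr is purely an assumption for illustration and must match
wherever the port actually places its section (block) descriptors.

/* Hypothetical SoC override, e.g. in arch/arm/cpu/armv8/<soc>/cpu.c.
 * The 0x1000 offset into the area at gd->arch.tlb_addr is a placeholder;
 * it must point at the table level holding the section descriptors that
 * mmu_set_region_dcache_behaviour() updates.
 */
#include <common.h>
#include <asm/system.h>

DECLARE_GLOBAL_DATA_PTR;

u64 *arch_get_page_table(void)
{
	return (u64 *)(gd->arch.tlb_addr + 0x1000);
}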