cache-smp.c

/* SMP global caching code
 *
 * Copyright (C) 2010 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/mman.h>
#include <linux/threads.h>
#include <linux/interrupt.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/processor.h>
#include <asm/cacheflush.h>
#include <asm/io.h>
#include <asm/uaccess.h>
#include <asm/smp.h>
#include "cache-smp.h"

DEFINE_SPINLOCK(smp_cache_lock);
static unsigned long smp_cache_mask;
static unsigned long smp_cache_start;
static unsigned long smp_cache_end;
static cpumask_t smp_cache_ipi_map;	/* Bitmask of cache IPI done CPUs */

/**
 * smp_cache_interrupt - Handle IPI request to flush caches.
 *
 * Handle a request delivered by IPI to flush the current CPU's
 * caches.  The parameters are stored in smp_cache_*.
 */
void smp_cache_interrupt(void)
{
	unsigned long opr_mask = smp_cache_mask;

	switch ((enum smp_dcache_ops)(opr_mask & SMP_DCACHE_OP_MASK)) {
	case SMP_DCACHE_NOP:
		break;
	case SMP_DCACHE_INV:
		mn10300_local_dcache_inv();
		break;
	case SMP_DCACHE_INV_RANGE:
		mn10300_local_dcache_inv_range(smp_cache_start, smp_cache_end);
		break;
	case SMP_DCACHE_FLUSH:
		mn10300_local_dcache_flush();
		break;
	case SMP_DCACHE_FLUSH_RANGE:
		mn10300_local_dcache_flush_range(smp_cache_start,
						 smp_cache_end);
		break;
	case SMP_DCACHE_FLUSH_INV:
		mn10300_local_dcache_flush_inv();
		break;
	case SMP_DCACHE_FLUSH_INV_RANGE:
		mn10300_local_dcache_flush_inv_range(smp_cache_start,
						     smp_cache_end);
		break;
	}

	switch ((enum smp_icache_ops)(opr_mask & SMP_ICACHE_OP_MASK)) {
	case SMP_ICACHE_NOP:
		break;
	case SMP_ICACHE_INV:
		mn10300_local_icache_inv();
		break;
	case SMP_ICACHE_INV_RANGE:
		mn10300_local_icache_inv_range(smp_cache_start, smp_cache_end);
		break;
	}

	cpumask_clear_cpu(smp_processor_id(), &smp_cache_ipi_map);
}
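
/*
 * Illustrative sketch (not part of the original file): smp_cache_interrupt()
 * is reached through the FLUSH_CACHE_IPI vector.  A thin irqreturn_t wrapper
 * registered in the arch SMP setup code along the lines below would do; the
 * wrapper name, flags and action name here are assumptions for illustration.
 *
 *	static irqreturn_t flush_cache_ipi_handler(int irq, void *dev_id)
 *	{
 *		smp_cache_interrupt();
 *		return IRQ_HANDLED;
 *	}
 *
 *	ret = request_irq(FLUSH_CACHE_IPI, flush_cache_ipi_handler,
 *			  IRQF_NOBALANCING, "smp cache flush ipi", NULL);
 */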

/**
 * smp_cache_call - Issue an IPI to request the other CPUs flush caches
 * @opr_mask: Cache operation flags
 * @start: Start address of request
 * @end: End address of request
 *
 * Send cache flush IPI to other CPUs.  This invokes smp_cache_interrupt()
 * above on those other CPUs and then waits for them to finish.
 *
 * The caller must hold smp_cache_lock.
 */
void smp_cache_call(unsigned long opr_mask,
		    unsigned long start, unsigned long end)
{
	smp_cache_mask = opr_mask;
	smp_cache_start = start;
	smp_cache_end = end;
	cpumask_copy(&smp_cache_ipi_map, cpu_online_mask);
	cpumask_clear_cpu(smp_processor_id(), &smp_cache_ipi_map);

	send_IPI_allbutself(FLUSH_CACHE_IPI);

	while (!cpumask_empty(&smp_cache_ipi_map))
		/* nothing. lockup detection does not belong here */
		mb();
}
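
/*
 * Illustrative sketch (not part of the original file): a typical caller takes
 * smp_cache_lock around smp_cache_call() so that the smp_cache_* parameter
 * globals cannot be overwritten by a concurrent request, and performs its own
 * local flush before asking the other CPUs to do theirs.  The smp_lock_cache()
 * and smp_unlock_cache() helpers are assumed to be irqsave spinlock wrappers
 * provided by "cache-smp.h".
 *
 *	void mn10300_dcache_flush(void)
 *	{
 *		unsigned long flags;
 *
 *		flags = smp_lock_cache();
 *		mn10300_local_dcache_flush();
 *		smp_cache_call(SMP_DCACHE_FLUSH, 0, 0);
 *		smp_unlock_cache(flags);
 *	}
 */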