Author | Tokens | Token Proportion | Commits | Commit Proportion |
---|---|---|---|---|
Christoph Lameter | 658 | 39.03% | 4 | 10.00% |
Peter Zijlstra | 438 | 25.98% | 2 | 5.00% |
Tejun Heo | 387 | 22.95% | 6 | 15.00% |
Mike Travis | 52 | 3.08% | 2 | 5.00% |
Rusty Russell | 38 | 2.25% | 4 | 10.00% |
Jeremy Fitzhardinge | 20 | 1.19% | 1 | 2.50% |
Nicholas Piggin | 16 | 0.95% | 1 | 2.50% |
Brian Gerst | 15 | 0.89% | 3 | 7.50% |
Mark Rutland | 15 | 0.89% | 1 | 2.50% |
Ingo Molnar | 13 | 0.77% | 2 | 5.00% |
Linus Torvalds (pre-git) | 9 | 0.53% | 3 | 7.50% |
David Howells | 6 | 0.36% | 2 | 5.00% |
Hugh Dickins | 5 | 0.30% | 1 | 2.50% |
Eric Dumazet | 4 | 0.24% | 1 | 2.50% |
David Mosberger-Tang | 2 | 0.12% | 1 | 2.50% |
Luc Van Oostenryck | 2 | 0.12% | 1 | 2.50% |
Arnd Bergmann | 2 | 0.12% | 1 | 2.50% |
Greg Kroah-Hartman | 1 | 0.06% | 1 | 2.50% |
Nathan Chancellor | 1 | 0.06% | 1 | 2.50% |
Denys Vlasenko | 1 | 0.06% | 1 | 2.50% |
Andrew Morton | 1 | 0.06% | 1 | 2.50% |
Total | 1686 | 100.00% | 40 | 100.00% |
<!-- cregit rendering artifact: the source file's line-number gutter (lines 1-547) was flattened into a single run of digits here. -->
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

/*
 * Generic (arch-independent) per-CPU accessor definitions.
 *
 * This header supplies:
 *  - the per-CPU offset machinery for CONFIG_SMP builds,
 *  - the generic bodies for the raw_cpu_*() and this_cpu_*() operations,
 *  - per-size (_1/_2/_4/_8, plus 64/128-bit cmpxchg) fallback definitions
 *    that are used only when an architecture has not already provided its
 *    own optimized macro of the same name (each is guarded by #ifndef).
 */

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* Debug build: smp_processor_id() checks that preemption is disabled. */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

/* Linker section holding per-CPU data; on UP there is only one copy. */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

/*
 * Generic raw_cpu_*() bodies: plain memory accesses on this CPU's
 * instance with NO preemption/irq protection (contrast the
 * this_cpu_generic_*() variants further down, which disable irqs).
 */

/* Read this CPU's instance of @pcp. */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

/* Apply read-modify-write operator @op (e.g. +=) with @val. */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

/* Add @val and evaluate to the new value. */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
									\
	*__p += val;							\
	*__p;								\
})

/* Store @nval and evaluate to the previous value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})

/*
 * Build a try_cmpxchg from a plain cmpxchg primitive @_cmpxchg:
 * on failure the observed value is written back through @ovalp;
 * evaluates to true iff the exchange succeeded.
 */
#define __cpu_fallback_try_cmpxchg(pcp, ovalp, nval, _cmpxchg)		\
({									\
	typeof(pcp) __val, __old = *(ovalp);				\
	__val = _cmpxchg(pcp, __old, nval);				\
	if (__val != __old)						\
		*(ovalp) = __val;					\
	__val == __old;							\
})

/* Compare *ovalp/swap @nval; updates *ovalp on mismatch, returns bool. */
#define raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __val = *__p, ___old = *(ovalp);			\
	bool __ret;							\
	if (__val == ___old) {						\
		*__p = nval;						\
		__ret = true;						\
	} else {							\
		*(ovalp) = __val;					\
		__ret = false;						\
	}								\
	__ret;								\
})

/* Classic cmpxchg: evaluates to the value observed before the swap. */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __old = (oval);					\
	raw_cpu_generic_try_cmpxchg(pcp, &__old, nval);			\
	__old;								\
})

/* Tear-free read of a native-word @pcp under disabled preemption. */
#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

/* Read of a non-native-sized @pcp; irqs disabled to avoid torn reads. */
#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

/* Pick the cheaper protected read based on whether @pcp is word-sized. */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})

/*
 * Generic this_cpu_*() bodies: same operations as the raw_cpu_generic_*()
 * forms above, made safe against preemption and interrupts by bracketing
 * the raw operation with raw_local_irq_save()/restore().
 */

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)


#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)			\
({									\
	bool __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

/*
 * Per-size raw_cpu_*() fallbacks. Each definition is taken only if the
 * architecture did not already provide one (#ifndef guard); the suffix
 * (_1/_2/_4/_8) is the operand size in bytes.
 */

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)	raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)	raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)	raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)	raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

/*
 * try_cmpxchg fallbacks: if the arch supplied a plain cmpxchg of this
 * size, synthesize try_cmpxchg from it via __cpu_fallback_try_cmpxchg();
 * otherwise fall through to the fully generic implementation.
 */
#ifndef raw_cpu_try_cmpxchg_1
#ifdef raw_cpu_cmpxchg_1
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_1)
#else
#define raw_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_2
#ifdef raw_cpu_cmpxchg_2
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_2)
#else
#define raw_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_4
#ifdef raw_cpu_cmpxchg_4
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_4)
#else
#define raw_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg_8
#ifdef raw_cpu_cmpxchg_8
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg_8)
#else
#define raw_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg64
#ifdef raw_cpu_cmpxchg64
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg64)
#else
#define raw_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef raw_cpu_try_cmpxchg128
#ifdef raw_cpu_cmpxchg128
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, raw_cpu_cmpxchg128)
#else
#define raw_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	raw_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg64
#define raw_cpu_cmpxchg64(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg128
#define raw_cpu_cmpxchg128(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

/*
 * Per-size this_cpu_*() fallbacks — the preemption/irq-safe counterparts
 * of the raw_cpu_*() definitions above, again only used when the arch
 * has not provided its own.
 */

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

/* this_cpu try_cmpxchg fallbacks: same synthesis scheme as the raw_cpu ones. */
#ifndef this_cpu_try_cmpxchg_1
#ifdef this_cpu_cmpxchg_1
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_1)
#else
#define this_cpu_try_cmpxchg_1(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_2
#ifdef this_cpu_cmpxchg_2
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_2)
#else
#define this_cpu_try_cmpxchg_2(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_4
#ifdef this_cpu_cmpxchg_4
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_4)
#else
#define this_cpu_try_cmpxchg_4(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg_8
#ifdef this_cpu_cmpxchg_8
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg_8)
#else
#define this_cpu_try_cmpxchg_8(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg64
#ifdef this_cpu_cmpxchg64
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg64)
#else
#define this_cpu_try_cmpxchg64(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif
#ifndef this_cpu_try_cmpxchg128
#ifdef this_cpu_cmpxchg128
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	__cpu_fallback_try_cmpxchg(pcp, ovalp, nval, this_cpu_cmpxchg128)
#else
#define this_cpu_try_cmpxchg128(pcp, ovalp, nval) \
	this_cpu_generic_try_cmpxchg(pcp, ovalp, nval)
#endif
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg64
#define this_cpu_cmpxchg64(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg128
#define this_cpu_cmpxchg128(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */
Information contained on this website is provided for historical reference only and does not indicate or represent copyright ownership.
Created with Cregit http://github.com/cregit/cregit
Version 2.0-RC1