Project

General

Profile

Backport #7123 » backport_r37088_r37083_r37082_r37076_r37075_to_193.patch

Author: Nari (Narihiro Nakamura), 11/13/2012 06:13 PM

View differences:

configure.in
AC_DEFINE_UNQUOTED(STACK_END_ADDRESS, $rb_cv_stack_end_address)
fi
AC_CACHE_CHECK(for gc_mark and gc_children stack frame approximate size(word), rb_cv_gc_mark_stackframe_word,
[save_CFLAGS="$CFLAGS"
CFLAGS="-O0"
AC_TRY_RUN([
/* Globals shared with main(): the measured frame size (in pointer-sized
   words) and the stack address recorded at the start of the probe. */
int word;
char *stack_start;
/* Measure the distance between the stack position recorded in main()
   and a local variable in this frame, and store it in `word`, rounded
   up to whole pointer-sized words. This approximates the combined
   stack frame cost of one gc_mark -> gc_mark_children call pair.
   NOTE(review): declared with empty parens but called with three
   arguments below -- legal under pre-C23 unprototyped-call rules and
   apparently deliberate, so the argument passing is included in the
   measured frame size. Do not "fix" the prototype; it would change
   the measurement. */
void
set_stackframe_word()
{
int dumy = 42;
int diff;
/* The stack may grow up or down; take the absolute distance. */
if (stack_start < (char *)&dumy) {
diff = (int)((char *)&dumy - stack_start);
}
else {
diff = (int)(stack_start - (char *)&dumy);
}
word = (diff/sizeof(void *));
/* Round a partial word up so the estimate never undercounts. */
if ((diff % sizeof(void *)) != 0) {
word++;
}
}
/* Stand-in for gc.c's gc_mark_children(): its only job is to occupy a
   stack frame with a comparable signature while invoking the
   frame-measuring helper. The unused local mirrors work the real
   function would do and pads the frame. */
void
gc_mark_children(void *p1, void *p2, int lev)
{
void *obj = p2;
set_stackframe_word(p1,p2,lev);
}
/* Stand-in for gc.c's gc_mark(): adds one more frame on top of
   gc_mark_children so the measured distance spans both frames.
   (NOTE(review): lev++ passes the pre-increment value and the
   increment result is discarded -- harmless here since only the frame
   size matters, not the value.) */
void
gc_mark(void *p1, void *p2, int lev)
{
void *obj = p2;
gc_mark_children(p1,p2,lev++);
}
/* Record the current stack position, run one gc_mark ->
   gc_mark_children call, and report the measured per-level frame size
   (in words) through the process exit status, which the configure
   script captures as $? into rb_cv_gc_mark_stackframe_word. */
int
main() {
int dumy = 42;
stack_start = (char *)&dumy;
gc_mark(0, 0, 255);
return word;
}
],
[rb_cv_gc_mark_stackframe_word="$?"],
[rb_cv_gc_mark_stackframe_word="$?"],
[rb_cv_gc_mark_stackframe_word="30"])
CFLAGS="$save_CFLAGS"])
AC_DEFINE_UNQUOTED(GC_MARK_STACKFRAME_WORD, $rb_cv_gc_mark_stackframe_word)
dnl Checks for library functions.
AC_TYPE_GETGROUPS
AC_TYPE_SIGNAL
gc.c
#define nomem_error GET_VM()->special_exceptions[ruby_error_nomemory]
#define MARK_STACK_MAX 1024
int ruby_gc_debug_indent = 0;
/* for GC profile */
......
struct gc_list *next;
};
#define STACK_CHUNK_SIZE 500
typedef struct stack_chunk {
VALUE data[STACK_CHUNK_SIZE];
struct stack_chunk *next;
} stack_chunk_t;
typedef struct mark_stack {
stack_chunk_t *chunk;
stack_chunk_t *cache;
size_t index;
size_t limit;
size_t cache_size;
size_t unused_cache_size;
} mark_stack_t;
#define CALC_EXACT_MALLOC_SIZE 0
typedef struct rb_objspace {
......
st_table *table;
RVALUE *deferred;
} final;
struct {
VALUE buffer[MARK_STACK_MAX];
VALUE *ptr;
int overflow;
} markstack;
mark_stack_t mark_stack;
struct {
int run;
gc_profile_record *record;
......
#define during_gc objspace->flags.during_gc
#define finalizer_table objspace->final.table
#define deferred_final_list objspace->final.deferred
#define mark_stack objspace->markstack.buffer
#define mark_stack_ptr objspace->markstack.ptr
#define mark_stack_overflow objspace->markstack.overflow
#define global_List objspace->global_list
#define ruby_gc_stress objspace->gc_stress
#define initial_malloc_limit initial_params.initial_malloc_limit
......
#endif
static void initial_expand_heap(rb_objspace_t *objspace);
static void init_mark_stack(mark_stack_t *stack);
void
rb_gc_set_params(void)
......
static void gc_sweep(rb_objspace_t *);
static void slot_sweep(rb_objspace_t *, struct heaps_slot *);
static void rest_sweep(rb_objspace_t *);
static void free_stack_chunks(mark_stack_t *);
void
rb_objspace_free(rb_objspace_t *objspace)
......
heaps_used = 0;
heaps = 0;
}
free_stack_chunks(&objspace->mark_stack);
free(objspace);
}
#endif
......
init_heap(rb_objspace_t *objspace)
{
add_heap_slots(objspace, HEAP_MIN_SLOTS / HEAP_OBJ_LIMIT);
init_mark_stack(&objspace->mark_stack);
#ifdef USE_SIGALTSTACK
{
/* altstack of another threads are allocated in another place */
......
}
#endif
#define GC_LEVEL_MAX 250
#define STACKFRAME_FOR_GC_MARK (GC_LEVEL_MAX * GC_MARK_STACKFRAME_WORD)
/* Marking stack */
static void push_mark_stack(mark_stack_t *, VALUE);
static int pop_mark_stack(mark_stack_t *, VALUE *);
static void shrink_stack_chunk_cache(mark_stack_t *stack);
static stack_chunk_t *
stack_chunk_alloc(void)
{
    /* One fixed-size chunk of the marking stack. GC cannot allocate
     * through the Ruby heap at this point, so plain malloc is used
     * and an out-of-memory condition is escalated via rb_memerror(). */
    stack_chunk_t *chunk = malloc(sizeof(stack_chunk_t));

    if (chunk == NULL) {
        rb_memerror();
    }
    return chunk;
}
/* Non-zero when the mark stack holds no chunks at all.
 * NOTE(review): "stask" is a typo for "stack", kept as-is because the
 * caller in pop_mark_stack() uses this spelling. */
static inline int
is_mark_stask_empty(mark_stack_t *stack)
{
    return (stack->chunk != NULL) ? 0 : 1;
}
static void
add_stack_chunk_cache(mark_stack_t *stack, stack_chunk_t *chunk)
{
    /* Push a retired chunk onto the cache free-list so the next
     * push_mark_stack_chunk() can reuse it instead of calling malloc. */
    stack_chunk_t *head = stack->cache;

    chunk->next = head;
    stack->cache = chunk;
    stack->cache_size += 1;
}
static void
shrink_stack_chunk_cache(mark_stack_t *stack)
{
    /* If more than half of the cached chunks went unused during the
     * last marking phase, return one chunk to the allocator. At most
     * one chunk is freed per call, so the cache shrinks gradually
     * rather than collapsing after a single quiet cycle. */
    if (stack->unused_cache_size > stack->cache_size / 2) {
        stack_chunk_t *victim = stack->cache;

        stack->cache = victim->next;
        stack->cache_size--;
        free(victim);
    }
    /* Start the next cycle assuming every remaining cached chunk is
     * unused; push_mark_stack_chunk() lowers this as chunks are taken. */
    stack->unused_cache_size = stack->cache_size;
}
static void
push_mark_stack_chunk(mark_stack_t *stack)
{
    stack_chunk_t *fresh;

    /* Prefer a cached chunk; fall back to malloc when the cache is
     * empty. */
    if (stack->cache_size == 0) {
        fresh = stack_chunk_alloc();
    }
    else {
        fresh = stack->cache;
        stack->cache = fresh->next;
        stack->cache_size--;
        /* Track the low-water mark of the cache for this cycle; it
         * drives shrink_stack_chunk_cache(). */
        if (stack->unused_cache_size > stack->cache_size) {
            stack->unused_cache_size = stack->cache_size;
        }
    }
    /* Link the new chunk in as the current top and start filling it
     * from slot 0. */
    fresh->next = stack->chunk;
    stack->chunk = fresh;
    stack->index = 0;
}
static void
pop_mark_stack_chunk(mark_stack_t *stack)
{
    /* Detach the drained top chunk, hand it to the cache, and resume
     * in the previous chunk, which is always completely full. */
    stack_chunk_t *retired = stack->chunk;

    stack->chunk = retired->next;
    add_stack_chunk_cache(stack, retired);
    stack->index = stack->limit;
}
#if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
/* Release every chunk owned by the mark stack at objspace teardown.
 * Frees both the active chunk list and the cache free-list: the cache
 * is preallocated in init_mark_stack() and refilled by
 * pop_mark_stack_chunk(), so freeing only stack->chunk (as before)
 * leaked the cached chunks. */
static void
free_stack_chunks(mark_stack_t *stack)
{
    stack_chunk_t *chunk = stack->chunk;
    stack_chunk_t *next = NULL;

    while (chunk != NULL) {
        next = chunk->next;
        free(chunk);
        chunk = next;
    }
    chunk = stack->cache;
    while (chunk != NULL) {
        next = chunk->next;
        free(chunk);
        chunk = next;
    }
    /* Leave the structure in a consistent empty state. */
    stack->chunk = NULL;
    stack->cache = NULL;
    stack->cache_size = 0;
    stack->unused_cache_size = 0;
}
#endif
static void
push_mark_stack(mark_stack_t *stack, VALUE data)
{
    /* Grow by one chunk whenever the current chunk is full. */
    if (stack->index == stack->limit) {
        push_mark_stack_chunk(stack);
    }
    stack->chunk->data[stack->index] = data;
    stack->index++;
}
/* Pop the topmost value from the mark stack into *data.
 * Returns TRUE on success, FALSE when the stack is empty.
 * Refactored: the original duplicated the pop statement in the
 * "last slot in chunk" branch and the fall-through; a single load
 * followed by a drain check is equivalent (checking index == 0 after
 * the decrement equals checking index == 1 before it). */
static int
pop_mark_stack(mark_stack_t *stack, VALUE *data)
{
    if (is_mark_stask_empty(stack)) {
        return FALSE;
    }
    *data = stack->chunk->data[--stack->index];
    /* Chunk drained: retire it so the next pop continues in the
     * previous (full) chunk. */
    if (stack->index == 0) {
        pop_mark_stack_chunk(stack);
    }
    return TRUE;
}
static void
init_mark_stack(mark_stack_t *stack)
{
    int i;

    /* Install the first chunk, then publish the per-chunk capacity.
     * (push_mark_stack_chunk only writes index, so setting limit
     * afterwards is safe.) */
    push_mark_stack_chunk(stack);
    stack->limit = STACK_CHUNK_SIZE;

    /* Warm the free-list with four spare chunks so early marking does
     * not need to hit malloc. */
    for (i = 4; i > 0; i--) {
        add_stack_chunk_cache(stack, stack_chunk_alloc());
    }
    stack->unused_cache_size = stack->cache_size;
}
size_t
ruby_stack_length(VALUE **p)
......
return STACK_LENGTH;
}
#if !(defined(POSIX_SIGNAL) && defined(SIGSEGV) && defined(HAVE_SIGALTSTACK))
static int
stack_check(int water_mark)
{
......
#endif
return ret;
}
#endif
#define STACKFRAME_FOR_CALL_CFUNC 512
......
#endif
}
static void
init_mark_stack(rb_objspace_t *objspace)
{
mark_stack_overflow = 0;
mark_stack_ptr = mark_stack;
}
#define MARK_STACK_EMPTY (mark_stack_ptr == mark_stack)
static void gc_mark(rb_objspace_t *objspace, VALUE ptr, int lev);
static void gc_mark_children(rb_objspace_t *objspace, VALUE ptr, int lev);
static void gc_mark(rb_objspace_t *objspace, VALUE ptr);
static void gc_mark_children(rb_objspace_t *objspace, VALUE ptr);
static void
gc_mark_all(rb_objspace_t *objspace)
gc_mark_stacked_objects(rb_objspace_t *objspace)
{
RVALUE *p, *pend;
size_t i;
mark_stack_t *mstack = &objspace->mark_stack;
VALUE obj = 0;
init_mark_stack(objspace);
for (i = 0; i < heaps_used; i++) {
p = objspace->heap.sorted[i].start; pend = objspace->heap.sorted[i].end;
while (p < pend) {
if ((p->as.basic.flags & FL_MARK) &&
(p->as.basic.flags != FL_MARK)) {
gc_mark_children(objspace, (VALUE)p, 0);
}
p++;
}
}
}
static void
gc_mark_rest(rb_objspace_t *objspace)
{
VALUE tmp_arry[MARK_STACK_MAX];
VALUE *p;
p = (mark_stack_ptr - mark_stack) + tmp_arry;
MEMCPY(tmp_arry, mark_stack, VALUE, p - tmp_arry);
init_mark_stack(objspace);
while (p != tmp_arry) {
p--;
gc_mark_children(objspace, *p, 0);
if (!mstack->index) return;
while (pop_mark_stack(mstack, &obj)) {
gc_mark_children(objspace, obj);
}
shrink_stack_chunk_cache(mstack);
}
static inline int
......
v = *x;
VALGRIND_MAKE_MEM_DEFINED(&v, sizeof(v));
if (is_pointer_to_heap(objspace, (void *)v)) {
gc_mark(objspace, v, 0);
gc_mark(objspace, v);
}
x++;
}
......
struct mark_tbl_arg {
rb_objspace_t *objspace;
int lev;
};
static int
mark_entry(ID key, VALUE value, st_data_t data)
{
struct mark_tbl_arg *arg = (void*)data;
gc_mark(arg->objspace, value, arg->lev);
gc_mark(arg->objspace, value);
return ST_CONTINUE;
}
static void
mark_tbl(rb_objspace_t *objspace, st_table *tbl, int lev)
mark_tbl(rb_objspace_t *objspace, st_table *tbl)
{
struct mark_tbl_arg arg;
if (!tbl || tbl->num_entries == 0) return;
arg.objspace = objspace;
arg.lev = lev;
st_foreach(tbl, mark_entry, (st_data_t)&arg);
}
......
mark_key(VALUE key, VALUE value, st_data_t data)
{
struct mark_tbl_arg *arg = (void*)data;
gc_mark(arg->objspace, key, arg->lev);
gc_mark(arg->objspace, key);
return ST_CONTINUE;
}
static void
mark_set(rb_objspace_t *objspace, st_table *tbl, int lev)
mark_set(rb_objspace_t *objspace, st_table *tbl)
{
struct mark_tbl_arg arg;
if (!tbl) return;
arg.objspace = objspace;
arg.lev = lev;
st_foreach(tbl, mark_key, (st_data_t)&arg);
}
void
rb_mark_set(st_table *tbl)
{
mark_set(&rb_objspace, tbl, 0);
mark_set(&rb_objspace, tbl);
}
static int
mark_keyvalue(VALUE key, VALUE value, st_data_t data)
{
struct mark_tbl_arg *arg = (void*)data;
gc_mark(arg->objspace, key, arg->lev);
gc_mark(arg->objspace, value, arg->lev);
gc_mark(arg->objspace, key);
gc_mark(arg->objspace, value);
return ST_CONTINUE;
}
static void
mark_hash(rb_objspace_t *objspace, st_table *tbl, int lev)
mark_hash(rb_objspace_t *objspace, st_table *tbl)
{
struct mark_tbl_arg arg;
if (!tbl) return;
arg.objspace = objspace;
arg.lev = lev;
st_foreach(tbl, mark_keyvalue, (st_data_t)&arg);
}
void
rb_mark_hash(st_table *tbl)
{
mark_hash(&rb_objspace, tbl, 0);
mark_hash(&rb_objspace, tbl);
}
static void
mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me, int lev)
mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me)
{
const rb_method_definition_t *def = me->def;
gc_mark(objspace, me->klass, lev);
gc_mark(objspace, me->klass);
if (!def) return;
switch (def->type) {
case VM_METHOD_TYPE_ISEQ:
gc_mark(objspace, def->body.iseq->self, lev);
gc_mark(objspace, def->body.iseq->self);
break;
case VM_METHOD_TYPE_BMETHOD:
gc_mark(objspace, def->body.proc, lev);
gc_mark(objspace, def->body.proc);
break;
case VM_METHOD_TYPE_ATTRSET:
case VM_METHOD_TYPE_IVAR:
gc_mark(objspace, def->body.attr.location, lev);
gc_mark(objspace, def->body.attr.location);
break;
default:
break; /* ignore */
......
void
rb_mark_method_entry(const rb_method_entry_t *me)
{
mark_method_entry(&rb_objspace, me, 0);
mark_method_entry(&rb_objspace, me);
}
static int
mark_method_entry_i(ID key, const rb_method_entry_t *me, st_data_t data)
{
struct mark_tbl_arg *arg = (void*)data;
mark_method_entry(arg->objspace, me, arg->lev);
mark_method_entry(arg->objspace, me);
return ST_CONTINUE;
}
static void
mark_m_tbl(rb_objspace_t *objspace, st_table *tbl, int lev)
mark_m_tbl(rb_objspace_t *objspace, st_table *tbl)
{
struct mark_tbl_arg arg;
if (!tbl) return;
arg.objspace = objspace;
arg.lev = lev;
st_foreach(tbl, mark_method_entry_i, (st_data_t)&arg);
}
......
mark_const_entry_i(ID key, const rb_const_entry_t *ce, st_data_t data)
{
struct mark_tbl_arg *arg = (void*)data;
gc_mark(arg->objspace, ce->value, arg->lev);
gc_mark(arg->objspace, ce->value);
return ST_CONTINUE;
}
static void
mark_const_tbl(rb_objspace_t *objspace, st_table *tbl, int lev)
mark_const_tbl(rb_objspace_t *objspace, st_table *tbl)
{
struct mark_tbl_arg arg;
if (!tbl) return;
arg.objspace = objspace;
arg.lev = lev;
st_foreach(tbl, mark_const_entry_i, (st_data_t)&arg);
}
......
void
rb_mark_tbl(st_table *tbl)
{
mark_tbl(&rb_objspace, tbl, 0);
mark_tbl(&rb_objspace, tbl);
}
void
rb_gc_mark_maybe(VALUE obj)
{
if (is_pointer_to_heap(&rb_objspace, (void *)obj)) {
gc_mark(&rb_objspace, obj, 0);
gc_mark(&rb_objspace, obj);
}
}
static void
gc_mark(rb_objspace_t *objspace, VALUE ptr, int lev)
gc_mark(rb_objspace_t *objspace, VALUE ptr)
{
register RVALUE *obj;
......
obj->as.basic.flags |= FL_MARK;
objspace->heap.live_num++;
if (lev > GC_LEVEL_MAX || (lev == 0 && stack_check(STACKFRAME_FOR_GC_MARK))) {
if (!mark_stack_overflow) {
if (mark_stack_ptr - mark_stack < MARK_STACK_MAX) {
*mark_stack_ptr = ptr;
mark_stack_ptr++;
}
else {
mark_stack_overflow = 1;
}
}
return;
}
gc_mark_children(objspace, ptr, lev+1);
push_mark_stack(&objspace->mark_stack, ptr);
}
void
rb_gc_mark(VALUE ptr)
{
gc_mark(&rb_objspace, ptr, 0);
gc_mark(&rb_objspace, ptr);
}
static void
gc_mark_children(rb_objspace_t *objspace, VALUE ptr, int lev)
gc_mark_children(rb_objspace_t *objspace, VALUE ptr)
{
register RVALUE *obj = RANY(ptr);
......
case NODE_RESBODY:
case NODE_CLASS:
case NODE_BLOCK_PASS:
gc_mark(objspace, (VALUE)obj->as.node.u2.node, lev);
gc_mark(objspace, (VALUE)obj->as.node.u2.node);
/* fall through */
case NODE_BLOCK: /* 1,3 */
case NODE_OPTBLOCK:
......
case NODE_DEFS:
case NODE_OP_ASGN1:
case NODE_ARGS:
gc_mark(objspace, (VALUE)obj->as.node.u1.node, lev);
gc_mark(objspace, (VALUE)obj->as.node.u1.node);
/* fall through */
case NODE_SUPER: /* 3 */
case NODE_FCALL:
......
case NODE_ALIAS:
case NODE_VALIAS:
case NODE_ARGSCAT:
gc_mark(objspace, (VALUE)obj->as.node.u1.node, lev);
gc_mark(objspace, (VALUE)obj->as.node.u1.node);
/* fall through */
case NODE_GASGN: /* 2 */
case NODE_LASGN:
......
case NODE_SCOPE: /* 2,3 */
case NODE_CDECL:
case NODE_OPT_ARG:
gc_mark(objspace, (VALUE)obj->as.node.u3.node, lev);
gc_mark(objspace, (VALUE)obj->as.node.u3.node);
ptr = (VALUE)obj->as.node.u2.node;
goto again;
......
default: /* unlisted NODE */
if (is_pointer_to_heap(objspace, obj->as.node.u1.node)) {
gc_mark(objspace, (VALUE)obj->as.node.u1.node, lev);
gc_mark(objspace, (VALUE)obj->as.node.u1.node);
}
if (is_pointer_to_heap(objspace, obj->as.node.u2.node)) {
gc_mark(objspace, (VALUE)obj->as.node.u2.node, lev);
gc_mark(objspace, (VALUE)obj->as.node.u2.node);
}
if (is_pointer_to_heap(objspace, obj->as.node.u3.node)) {
gc_mark(objspace, (VALUE)obj->as.node.u3.node, lev);
gc_mark(objspace, (VALUE)obj->as.node.u3.node);
}
}
return; /* no need to mark class. */
}
gc_mark(objspace, obj->as.basic.klass, lev);
gc_mark(objspace, obj->as.basic.klass);
switch (BUILTIN_TYPE(obj)) {
case T_ICLASS:
case T_CLASS:
case T_MODULE:
mark_m_tbl(objspace, RCLASS_M_TBL(obj), lev);
mark_tbl(objspace, RCLASS_IV_TBL(obj), lev);
mark_const_tbl(objspace, RCLASS_CONST_TBL(obj), lev);
mark_m_tbl(objspace, RCLASS_M_TBL(obj));
mark_tbl(objspace, RCLASS_IV_TBL(obj));
mark_const_tbl(objspace, RCLASS_CONST_TBL(obj));
ptr = RCLASS_SUPER(obj);
goto again;
......
long i, len = RARRAY_LEN(obj);
VALUE *ptr = RARRAY_PTR(obj);
for (i=0; i < len; i++) {
gc_mark(objspace, *ptr++, lev);
gc_mark(objspace, *ptr++);
}
}
break;
case T_HASH:
mark_hash(objspace, obj->as.hash.ntbl, lev);
mark_hash(objspace, obj->as.hash.ntbl);
ptr = obj->as.hash.ifnone;
goto again;
......
long i, len = ROBJECT_NUMIV(obj);
VALUE *ptr = ROBJECT_IVPTR(obj);
for (i = 0; i < len; i++) {
gc_mark(objspace, *ptr++, lev);
gc_mark(objspace, *ptr++);
}
}
break;
case T_FILE:
if (obj->as.file.fptr) {
gc_mark(objspace, obj->as.file.fptr->pathv, lev);
gc_mark(objspace, obj->as.file.fptr->tied_io_for_writing, lev);
gc_mark(objspace, obj->as.file.fptr->writeconv_asciicompat, lev);
gc_mark(objspace, obj->as.file.fptr->writeconv_pre_ecopts, lev);
gc_mark(objspace, obj->as.file.fptr->encs.ecopts, lev);
gc_mark(objspace, obj->as.file.fptr->write_lock, lev);
gc_mark(objspace, obj->as.file.fptr->pathv);
gc_mark(objspace, obj->as.file.fptr->tied_io_for_writing);
gc_mark(objspace, obj->as.file.fptr->writeconv_asciicompat);
gc_mark(objspace, obj->as.file.fptr->writeconv_pre_ecopts);
gc_mark(objspace, obj->as.file.fptr->encs.ecopts);
gc_mark(objspace, obj->as.file.fptr->write_lock);
}
break;
case T_REGEXP:
gc_mark(objspace, obj->as.regexp.src, lev);
break;
ptr = obj->as.regexp.src;
goto again;
case T_FLOAT:
case T_BIGNUM:
......
break;
case T_MATCH:
gc_mark(objspace, obj->as.match.regexp, lev);
gc_mark(objspace, obj->as.match.regexp);
if (obj->as.match.str) {
ptr = obj->as.match.str;
goto again;
......
break;
case T_RATIONAL:
gc_mark(objspace, obj->as.rational.num, lev);
gc_mark(objspace, obj->as.rational.den, lev);
break;
gc_mark(objspace, obj->as.rational.num);
ptr = obj->as.rational.den;
goto again;
case T_COMPLEX:
gc_mark(objspace, obj->as.complex.real, lev);
gc_mark(objspace, obj->as.complex.imag, lev);
break;
gc_mark(objspace, obj->as.complex.real);
ptr = obj->as.complex.imag;
goto again;
case T_STRUCT:
{
......
VALUE *ptr = RSTRUCT_PTR(obj);
while (len--) {
gc_mark(objspace, *ptr++, lev);
gc_mark(objspace, *ptr++);
}
}
break;
......
SET_STACK_END;
init_mark_stack(objspace);
th->vm->self ? rb_gc_mark(th->vm->self) : rb_vm_mark(th->vm);
mark_tbl(objspace, finalizer_table, 0);
mark_tbl(objspace, finalizer_table);
mark_current_machine_context(objspace, th);
rb_gc_mark_symbols();
......
rb_mark_end_proc();
rb_gc_mark_global_tbl();
mark_tbl(objspace, rb_class_tbl, 0);
mark_tbl(objspace, rb_class_tbl);
/* mark generic instance variables for special constants */
rb_mark_generic_ivar_tbl();
......
rb_gc_mark_unlinked_live_method_entries(th->vm);
/* gc_mark objects whose marking are not completed*/
while (!MARK_STACK_EMPTY) {
if (mark_stack_overflow) {
gc_mark_all(objspace);
}
else {
gc_mark_rest(objspace);
}
}
/* marking-loop */
gc_mark_stacked_objects(objspace);
GC_PROF_MARK_TIMER_STOP;
}
......
/* XXX: this loop will make no sense */
/* because mark will not be removed */
finalize_deferred(objspace);
mark_tbl(objspace, finalizer_table, 0);
mark_tbl(objspace, finalizer_table);
gc_mark_stacked_objects(objspace);
st_foreach(finalizer_table, chain_finalized_object,
(st_data_t)&deferred_final_list);
} while (deferred_final_list);
win32/Makefile.sub
#define GETGROUPS_T int
#define RETSIGTYPE void
#define TYPEOF_TIMEVAL_TV_SEC long
#define GC_MARK_STACKFRAME_WORD 30
#define HAVE_ALLOCA 1
#define HAVE_DUP2 1
#define HAVE_MEMCMP 1
(4-4/4)