Backport #7123 » backport_r37088_r37083_r37082_r37076_r37075_to_193.patch

authorNari (Narihiro Nakamura), 11/13/2012 06:13 PM

View differences:

configure.in
1268 1268
  AC_DEFINE_UNQUOTED(STACK_END_ADDRESS, $rb_cv_stack_end_address)
1269 1269
fi
1270 1270

  
1271
AC_CACHE_CHECK(for gc_mark and gc_children stack frame approximate size(word), rb_cv_gc_mark_stackframe_word,
1272
[save_CFLAGS="$CFLAGS"
1273
CFLAGS="-O0"
1274
AC_TRY_RUN([
1275
int word;
1276
char *stack_start;
1277

  
1278
void
1279
set_stackframe_word()
1280
{
1281
    int dumy = 42;
1282
    int diff;
1283

  
1284
    if (stack_start < (char *)&dumy) {
1285
        diff = (int)((char *)&dumy - stack_start);
1286
    }
1287
    else {
1288
        diff = (int)(stack_start - (char *)&dumy);
1289
    }
1290
    word = (diff/sizeof(void *));
1291
    if ((diff % sizeof(void *)) != 0) {
1292
        word++;
1293
    }
1294
}
1295

  
1296
void
1297
gc_mark_children(void *p1, void *p2, int lev)
1298
{
1299
    void *obj = p2;
1300

  
1301
    set_stackframe_word(p1,p2,lev);
1302
}
1303

  
1304
void
1305
gc_mark(void *p1, void *p2, int lev)
1306
{
1307
    void *obj = p2;
1308

  
1309
    gc_mark_children(p1,p2,lev++);
1310
}
1311

  
1312
int
1313
main() {
1314
  int dumy = 42;
1315

  
1316
  stack_start = (char *)&dumy;
1317
  gc_mark(0, 0, 255);
1318
  return word;
1319
}
1320
],
1321
  [rb_cv_gc_mark_stackframe_word="$?"],
1322
  [rb_cv_gc_mark_stackframe_word="$?"],
1323
  [rb_cv_gc_mark_stackframe_word="30"])
1324
CFLAGS="$save_CFLAGS"])
1325
AC_DEFINE_UNQUOTED(GC_MARK_STACKFRAME_WORD, $rb_cv_gc_mark_stackframe_word)
1326

  
1327

  
1328 1271
dnl Checks for library functions.
1329 1272
AC_TYPE_GETGROUPS
1330 1273
AC_TYPE_SIGNAL
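
For orientation, here is a minimal sketch (not part of the patch) of the marking control flow after this backport, using the names introduced in the gc.c hunks below. Guard checks in gc_mark, such as the special-constant and already-marked tests, are elided. gc_mark now only flags an object and pushes it onto the new chunked mark stack; a separate drain loop expands children iteratively, which is why the configure-time estimate of the recursion frame size above can be dropped.

/* Sketch only: condensed from the gc.c hunks below, guard checks elided. */
static void
gc_mark(rb_objspace_t *objspace, VALUE ptr)
{
    RVALUE *obj = RANY(ptr);

    obj->as.basic.flags |= FL_MARK;
    objspace->heap.live_num++;
    push_mark_stack(&objspace->mark_stack, ptr);    /* defer the children */
}

static void
gc_mark_stacked_objects(rb_objspace_t *objspace)
{
    mark_stack_t *mstack = &objspace->mark_stack;
    VALUE obj = 0;

    while (pop_mark_stack(mstack, &obj)) {
        gc_mark_children(objspace, obj);            /* may push more objects */
    }
}
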
gc.c
98 98

  
99 99
#define nomem_error GET_VM()->special_exceptions[ruby_error_nomemory]
100 100

  
101
#define MARK_STACK_MAX 1024
102

  
103 101
int ruby_gc_debug_indent = 0;
104 102

  
105 103
/* for GC profile */
......
319 317
    struct gc_list *next;
320 318
};
321 319

  
320
#define STACK_CHUNK_SIZE 500
321

  
322
typedef struct stack_chunk {
323
    VALUE data[STACK_CHUNK_SIZE];
324
    struct stack_chunk *next;
325
} stack_chunk_t;
326

  
327
typedef struct mark_stack {
328
    stack_chunk_t *chunk;
329
    stack_chunk_t *cache;
330
    size_t index;
331
    size_t limit;
332
    size_t cache_size;
333
    size_t unused_cache_size;
334
} mark_stack_t;
335

  
322 336
#define CALC_EXACT_MALLOC_SIZE 0
323 337

  
324 338
typedef struct rb_objspace {
......
355 369
	st_table *table;
356 370
	RVALUE *deferred;
357 371
    } final;
358
    struct {
359
	VALUE buffer[MARK_STACK_MAX];
360
	VALUE *ptr;
361
	int overflow;
362
    } markstack;
372
    mark_stack_t mark_stack;
363 373
    struct {
364 374
	int run;
365 375
	gc_profile_record *record;
......
394 404
#define during_gc		objspace->flags.during_gc
395 405
#define finalizer_table 	objspace->final.table
396 406
#define deferred_final_list	objspace->final.deferred
397
#define mark_stack		objspace->markstack.buffer
398
#define mark_stack_ptr		objspace->markstack.ptr
399
#define mark_stack_overflow	objspace->markstack.overflow
400 407
#define global_List		objspace->global_list
401 408
#define ruby_gc_stress		objspace->gc_stress
402 409
#define initial_malloc_limit	initial_params.initial_malloc_limit
......
419 426
#endif
420 427

  
421 428
static void initial_expand_heap(rb_objspace_t *objspace);
429
static void init_mark_stack(mark_stack_t *stack);
422 430

  
423 431
void
424 432
rb_gc_set_params(void)
......
465 473
static void gc_sweep(rb_objspace_t *);
466 474
static void slot_sweep(rb_objspace_t *, struct heaps_slot *);
467 475
static void rest_sweep(rb_objspace_t *);
476
static void free_stack_chunks(mark_stack_t *);
468 477

  
469 478
void
470 479
rb_objspace_free(rb_objspace_t *objspace)
......
491 500
	heaps_used = 0;
492 501
	heaps = 0;
493 502
    }
503
    free_stack_chunks(&objspace->mark_stack);
494 504
    free(objspace);
495 505
}
496 506
#endif
......
1100 1110
init_heap(rb_objspace_t *objspace)
1101 1111
{
1102 1112
    add_heap_slots(objspace, HEAP_MIN_SLOTS / HEAP_OBJ_LIMIT);
1113
    init_mark_stack(&objspace->mark_stack);
1103 1114
#ifdef USE_SIGALTSTACK
1104 1115
    {
1105 1116
	/* altstack of another threads are allocated in another place */
......
1296 1307
}
1297 1308
#endif
1298 1309

  
1299
#define GC_LEVEL_MAX 250
1300
#define STACKFRAME_FOR_GC_MARK (GC_LEVEL_MAX * GC_MARK_STACKFRAME_WORD)
1310
/* Marking stack */
1311

  
1312
static void push_mark_stack(mark_stack_t *, VALUE);
1313
static int pop_mark_stack(mark_stack_t *, VALUE *);
1314
static void shrink_stack_chunk_cache(mark_stack_t *stack);
1315

  
1316
static stack_chunk_t *
1317
stack_chunk_alloc(void)
1318
{
1319
    stack_chunk_t *res;
1320

  
1321
    res = malloc(sizeof(stack_chunk_t));
1322
    if (!res)
1323
        rb_memerror();
1324

  
1325
    return res;
1326
}
1327

  
1328
static inline int
1329
is_mark_stask_empty(mark_stack_t *stack)
1330
{
1331
    return stack->chunk == NULL;
1332
}
1333

  
1334
static void
1335
add_stack_chunk_cache(mark_stack_t *stack, stack_chunk_t *chunk)
1336
{
1337
    chunk->next = stack->cache;
1338
    stack->cache = chunk;
1339
    stack->cache_size++;
1340
}
1341

  
1342
static void
1343
shrink_stack_chunk_cache(mark_stack_t *stack)
1344
{
1345
    stack_chunk_t *chunk;
1346

  
1347
    if (stack->unused_cache_size > (stack->cache_size/2)) {
1348
        chunk = stack->cache;
1349
        stack->cache = stack->cache->next;
1350
        stack->cache_size--;
1351
        free(chunk);
1352
    }
1353
    stack->unused_cache_size = stack->cache_size;
1354
}
1355

  
1356
static void
1357
push_mark_stack_chunk(mark_stack_t *stack)
1358
{
1359
    stack_chunk_t *next;
1360

  
1361
    if (stack->cache_size > 0) {
1362
        next = stack->cache;
1363
        stack->cache = stack->cache->next;
1364
        stack->cache_size--;
1365
        if (stack->unused_cache_size > stack->cache_size)
1366
            stack->unused_cache_size = stack->cache_size;
1367
    }
1368
    else {
1369
        next = stack_chunk_alloc();
1370
    }
1371
    next->next = stack->chunk;
1372
    stack->chunk = next;
1373
    stack->index = 0;
1374
}
1375

  
1376
static void
1377
pop_mark_stack_chunk(mark_stack_t *stack)
1378
{
1379
    stack_chunk_t *prev;
1380

  
1381
    prev = stack->chunk->next;
1382
    add_stack_chunk_cache(stack, stack->chunk);
1383
    stack->chunk = prev;
1384
    stack->index = stack->limit;
1385
}
1386

  
1387
#if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
1388
static void
1389
free_stack_chunks(mark_stack_t *stack)
1390
{
1391
    stack_chunk_t *chunk = stack->chunk;
1392
    stack_chunk_t *next = NULL;
1393

  
1394
    while (chunk != NULL) {
1395
        next = chunk->next;
1396
        free(chunk);
1397
        chunk = next;
1398
    }
1399
}
1400
#endif
1401

  
1402
static void
1403
push_mark_stack(mark_stack_t *stack, VALUE data)
1404
{
1405
    if (stack->index == stack->limit) {
1406
        push_mark_stack_chunk(stack);
1407
    }
1408
    stack->chunk->data[stack->index++] = data;
1409
}
1410

  
1411
static int
1412
pop_mark_stack(mark_stack_t *stack, VALUE *data)
1413
{
1414
    if (is_mark_stask_empty(stack)) {
1415
        return FALSE;
1416
    }
1417
    if (stack->index == 1) {
1418
        *data = stack->chunk->data[--stack->index];
1419
        pop_mark_stack_chunk(stack);
1420
        return TRUE;
1421
    }
1422
    *data = stack->chunk->data[--stack->index];
1423
    return TRUE;
1424
}
1425

  
1426
static void
1427
init_mark_stack(mark_stack_t *stack)
1428
{
1429
    int i;
1430

  
1431
    push_mark_stack_chunk(stack);
1432
    stack->limit = STACK_CHUNK_SIZE;
1433

  
1434
    for(i=0; i < 4; i++) {
1435
        add_stack_chunk_cache(stack, stack_chunk_alloc());
1436
    }
1437
    stack->unused_cache_size = stack->cache_size;
1438
}
1439

  
1301 1440

  
1302 1441
size_t
1303 1442
ruby_stack_length(VALUE **p)
......
1308 1447
    return STACK_LENGTH;
1309 1448
}
1310 1449

  
1450
#if !(defined(POSIX_SIGNAL) && defined(SIGSEGV) && defined(HAVE_SIGALTSTACK))
1311 1451
static int
1312 1452
stack_check(int water_mark)
1313 1453
{
......
1323 1463
#endif
1324 1464
    return ret;
1325 1465
}
1466
#endif
1326 1467

  
1327 1468
#define STACKFRAME_FOR_CALL_CFUNC 512
1328 1469

  
......
1336 1477
#endif
1337 1478
}
1338 1479

  
1339
static void
1340
init_mark_stack(rb_objspace_t *objspace)
1341
{
1342
    mark_stack_overflow = 0;
1343
    mark_stack_ptr = mark_stack;
1344
}
1345

  
1346 1480
#define MARK_STACK_EMPTY (mark_stack_ptr == mark_stack)
1347 1481

  
1348
static void gc_mark(rb_objspace_t *objspace, VALUE ptr, int lev);
1349
static void gc_mark_children(rb_objspace_t *objspace, VALUE ptr, int lev);
1482
static void gc_mark(rb_objspace_t *objspace, VALUE ptr);
1483
static void gc_mark_children(rb_objspace_t *objspace, VALUE ptr);
1350 1484

  
1351 1485
static void
1352
gc_mark_all(rb_objspace_t *objspace)
1486
gc_mark_stacked_objects(rb_objspace_t *objspace)
1353 1487
{
1354
    RVALUE *p, *pend;
1355
    size_t i;
1488
    mark_stack_t *mstack = &objspace->mark_stack;
1489
    VALUE obj = 0;
1356 1490

  
1357
    init_mark_stack(objspace);
1358
    for (i = 0; i < heaps_used; i++) {
1359
	p = objspace->heap.sorted[i].start; pend = objspace->heap.sorted[i].end;
1360
	while (p < pend) {
1361
	    if ((p->as.basic.flags & FL_MARK) &&
1362
		(p->as.basic.flags != FL_MARK)) {
1363
		gc_mark_children(objspace, (VALUE)p, 0);
1364
	    }
1365
	    p++;
1366
	}
1367
    }
1368
}
1369

  
1370
static void
1371
gc_mark_rest(rb_objspace_t *objspace)
1372
{
1373
    VALUE tmp_arry[MARK_STACK_MAX];
1374
    VALUE *p;
1375

  
1376
    p = (mark_stack_ptr - mark_stack) + tmp_arry;
1377
    MEMCPY(tmp_arry, mark_stack, VALUE, p - tmp_arry);
1378

  
1379
    init_mark_stack(objspace);
1380
    while (p != tmp_arry) {
1381
	p--;
1382
	gc_mark_children(objspace, *p, 0);
1491
    if (!mstack->index) return;
1492
    while (pop_mark_stack(mstack, &obj)) {
1493
        gc_mark_children(objspace, obj);
1383 1494
    }
1495
    shrink_stack_chunk_cache(mstack);
1384 1496
}
1385 1497

  
1386 1498
static inline int
......
1419 1531
        v = *x;
1420 1532
        VALGRIND_MAKE_MEM_DEFINED(&v, sizeof(v));
1421 1533
	if (is_pointer_to_heap(objspace, (void *)v)) {
1422
	    gc_mark(objspace, v, 0);
1534
	    gc_mark(objspace, v);
1423 1535
	}
1424 1536
	x++;
1425 1537
    }
......
1445 1557

  
1446 1558
struct mark_tbl_arg {
1447 1559
    rb_objspace_t *objspace;
1448
    int lev;
1449 1560
};
1450 1561

  
1451 1562
static int
1452 1563
mark_entry(ID key, VALUE value, st_data_t data)
1453 1564
{
1454 1565
    struct mark_tbl_arg *arg = (void*)data;
1455
    gc_mark(arg->objspace, value, arg->lev);
1566
    gc_mark(arg->objspace, value);
1456 1567
    return ST_CONTINUE;
1457 1568
}
1458 1569

  
1459 1570
static void
1460
mark_tbl(rb_objspace_t *objspace, st_table *tbl, int lev)
1571
mark_tbl(rb_objspace_t *objspace, st_table *tbl)
1461 1572
{
1462 1573
    struct mark_tbl_arg arg;
1463 1574
    if (!tbl || tbl->num_entries == 0) return;
1464 1575
    arg.objspace = objspace;
1465
    arg.lev = lev;
1466 1576
    st_foreach(tbl, mark_entry, (st_data_t)&arg);
1467 1577
}
1468 1578

  
......
1470 1580
mark_key(VALUE key, VALUE value, st_data_t data)
1471 1581
{
1472 1582
    struct mark_tbl_arg *arg = (void*)data;
1473
    gc_mark(arg->objspace, key, arg->lev);
1583
    gc_mark(arg->objspace, key);
1474 1584
    return ST_CONTINUE;
1475 1585
}
1476 1586

  
1477 1587
static void
1478
mark_set(rb_objspace_t *objspace, st_table *tbl, int lev)
1588
mark_set(rb_objspace_t *objspace, st_table *tbl)
1479 1589
{
1480 1590
    struct mark_tbl_arg arg;
1481 1591
    if (!tbl) return;
1482 1592
    arg.objspace = objspace;
1483
    arg.lev = lev;
1484 1593
    st_foreach(tbl, mark_key, (st_data_t)&arg);
1485 1594
}
1486 1595

  
1487 1596
void
1488 1597
rb_mark_set(st_table *tbl)
1489 1598
{
1490
    mark_set(&rb_objspace, tbl, 0);
1599
    mark_set(&rb_objspace, tbl);
1491 1600
}
1492 1601

  
1493 1602
static int
1494 1603
mark_keyvalue(VALUE key, VALUE value, st_data_t data)
1495 1604
{
1496 1605
    struct mark_tbl_arg *arg = (void*)data;
1497
    gc_mark(arg->objspace, key, arg->lev);
1498
    gc_mark(arg->objspace, value, arg->lev);
1606
    gc_mark(arg->objspace, key);
1607
    gc_mark(arg->objspace, value);
1499 1608
    return ST_CONTINUE;
1500 1609
}
1501 1610

  
1502 1611
static void
1503
mark_hash(rb_objspace_t *objspace, st_table *tbl, int lev)
1612
mark_hash(rb_objspace_t *objspace, st_table *tbl)
1504 1613
{
1505 1614
    struct mark_tbl_arg arg;
1506 1615
    if (!tbl) return;
1507 1616
    arg.objspace = objspace;
1508
    arg.lev = lev;
1509 1617
    st_foreach(tbl, mark_keyvalue, (st_data_t)&arg);
1510 1618
}
1511 1619

  
1512 1620
void
1513 1621
rb_mark_hash(st_table *tbl)
1514 1622
{
1515
    mark_hash(&rb_objspace, tbl, 0);
1623
    mark_hash(&rb_objspace, tbl);
1516 1624
}
1517 1625

  
1518 1626
static void
1519
mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me, int lev)
1627
mark_method_entry(rb_objspace_t *objspace, const rb_method_entry_t *me)
1520 1628
{
1521 1629
    const rb_method_definition_t *def = me->def;
1522 1630

  
1523
    gc_mark(objspace, me->klass, lev);
1631
    gc_mark(objspace, me->klass);
1524 1632
    if (!def) return;
1525 1633
    switch (def->type) {
1526 1634
      case VM_METHOD_TYPE_ISEQ:
1527
	gc_mark(objspace, def->body.iseq->self, lev);
1635
	gc_mark(objspace, def->body.iseq->self);
1528 1636
	break;
1529 1637
      case VM_METHOD_TYPE_BMETHOD:
1530
	gc_mark(objspace, def->body.proc, lev);
1638
	gc_mark(objspace, def->body.proc);
1531 1639
	break;
1532 1640
      case VM_METHOD_TYPE_ATTRSET:
1533 1641
      case VM_METHOD_TYPE_IVAR:
1534
	gc_mark(objspace, def->body.attr.location, lev);
1642
	gc_mark(objspace, def->body.attr.location);
1535 1643
	break;
1536 1644
      default:
1537 1645
	break; /* ignore */
......
1541 1649
void
1542 1650
rb_mark_method_entry(const rb_method_entry_t *me)
1543 1651
{
1544
    mark_method_entry(&rb_objspace, me, 0);
1652
    mark_method_entry(&rb_objspace, me);
1545 1653
}
1546 1654

  
1547 1655
static int
1548 1656
mark_method_entry_i(ID key, const rb_method_entry_t *me, st_data_t data)
1549 1657
{
1550 1658
    struct mark_tbl_arg *arg = (void*)data;
1551
    mark_method_entry(arg->objspace, me, arg->lev);
1659
    mark_method_entry(arg->objspace, me);
1552 1660
    return ST_CONTINUE;
1553 1661
}
1554 1662

  
1555 1663
static void
1556
mark_m_tbl(rb_objspace_t *objspace, st_table *tbl, int lev)
1664
mark_m_tbl(rb_objspace_t *objspace, st_table *tbl)
1557 1665
{
1558 1666
    struct mark_tbl_arg arg;
1559 1667
    if (!tbl) return;
1560 1668
    arg.objspace = objspace;
1561
    arg.lev = lev;
1562 1669
    st_foreach(tbl, mark_method_entry_i, (st_data_t)&arg);
1563 1670
}
1564 1671

  
......
1580 1687
mark_const_entry_i(ID key, const rb_const_entry_t *ce, st_data_t data)
1581 1688
{
1582 1689
    struct mark_tbl_arg *arg = (void*)data;
1583
    gc_mark(arg->objspace, ce->value, arg->lev);
1690
    gc_mark(arg->objspace, ce->value);
1584 1691
    return ST_CONTINUE;
1585 1692
}
1586 1693

  
1587 1694
static void
1588
mark_const_tbl(rb_objspace_t *objspace, st_table *tbl, int lev)
1695
mark_const_tbl(rb_objspace_t *objspace, st_table *tbl)
1589 1696
{
1590 1697
    struct mark_tbl_arg arg;
1591 1698
    if (!tbl) return;
1592 1699
    arg.objspace = objspace;
1593
    arg.lev = lev;
1594 1700
    st_foreach(tbl, mark_const_entry_i, (st_data_t)&arg);
1595 1701
}
1596 1702

  
......
1611 1717
void
1612 1718
rb_mark_tbl(st_table *tbl)
1613 1719
{
1614
    mark_tbl(&rb_objspace, tbl, 0);
1720
    mark_tbl(&rb_objspace, tbl);
1615 1721
}
1616 1722

  
1617 1723
void
1618 1724
rb_gc_mark_maybe(VALUE obj)
1619 1725
{
1620 1726
    if (is_pointer_to_heap(&rb_objspace, (void *)obj)) {
1621
	gc_mark(&rb_objspace, obj, 0);
1727
	gc_mark(&rb_objspace, obj);
1622 1728
    }
1623 1729
}
1624 1730

  
1625 1731
static void
1626
gc_mark(rb_objspace_t *objspace, VALUE ptr, int lev)
1732
gc_mark(rb_objspace_t *objspace, VALUE ptr)
1627 1733
{
1628 1734
    register RVALUE *obj;
1629 1735

  
......
1634 1740
    obj->as.basic.flags |= FL_MARK;
1635 1741
    objspace->heap.live_num++;
1636 1742

  
1637
    if (lev > GC_LEVEL_MAX || (lev == 0 && stack_check(STACKFRAME_FOR_GC_MARK))) {
1638
	if (!mark_stack_overflow) {
1639
	    if (mark_stack_ptr - mark_stack < MARK_STACK_MAX) {
1640
		*mark_stack_ptr = ptr;
1641
		mark_stack_ptr++;
1642
	    }
1643
	    else {
1644
		mark_stack_overflow = 1;
1645
	    }
1646
	}
1647
	return;
1648
    }
1649
    gc_mark_children(objspace, ptr, lev+1);
1743
    push_mark_stack(&objspace->mark_stack, ptr);
1650 1744
}
1651 1745

  
1652 1746
void
1653 1747
rb_gc_mark(VALUE ptr)
1654 1748
{
1655
    gc_mark(&rb_objspace, ptr, 0);
1749
    gc_mark(&rb_objspace, ptr);
1656 1750
}
1657 1751

  
1658 1752
static void
1659
gc_mark_children(rb_objspace_t *objspace, VALUE ptr, int lev)
1753
gc_mark_children(rb_objspace_t *objspace, VALUE ptr)
1660 1754
{
1661 1755
    register RVALUE *obj = RANY(ptr);
1662 1756

  
......
1692 1786
	  case NODE_RESBODY:
1693 1787
	  case NODE_CLASS:
1694 1788
	  case NODE_BLOCK_PASS:
1695
	    gc_mark(objspace, (VALUE)obj->as.node.u2.node, lev);
1789
	    gc_mark(objspace, (VALUE)obj->as.node.u2.node);
1696 1790
	    /* fall through */
1697 1791
	  case NODE_BLOCK:	/* 1,3 */
1698 1792
	  case NODE_OPTBLOCK:
......
1706 1800
	  case NODE_DEFS:
1707 1801
	  case NODE_OP_ASGN1:
1708 1802
	  case NODE_ARGS:
1709
	    gc_mark(objspace, (VALUE)obj->as.node.u1.node, lev);
1803
	    gc_mark(objspace, (VALUE)obj->as.node.u1.node);
1710 1804
	    /* fall through */
1711 1805
	  case NODE_SUPER:	/* 3 */
1712 1806
	  case NODE_FCALL:
......
1733 1827
	  case NODE_ALIAS:
1734 1828
	  case NODE_VALIAS:
1735 1829
	  case NODE_ARGSCAT:
1736
	    gc_mark(objspace, (VALUE)obj->as.node.u1.node, lev);
1830
	    gc_mark(objspace, (VALUE)obj->as.node.u1.node);
1737 1831
	    /* fall through */
1738 1832
	  case NODE_GASGN:	/* 2 */
1739 1833
	  case NODE_LASGN:
......
1769 1863
	  case NODE_SCOPE:	/* 2,3 */
1770 1864
	  case NODE_CDECL:
1771 1865
	  case NODE_OPT_ARG:
1772
	    gc_mark(objspace, (VALUE)obj->as.node.u3.node, lev);
1866
	    gc_mark(objspace, (VALUE)obj->as.node.u3.node);
1773 1867
	    ptr = (VALUE)obj->as.node.u2.node;
1774 1868
	    goto again;
1775 1869

  
......
1801 1895

  
1802 1896
	  default:		/* unlisted NODE */
1803 1897
	    if (is_pointer_to_heap(objspace, obj->as.node.u1.node)) {
1804
		gc_mark(objspace, (VALUE)obj->as.node.u1.node, lev);
1898
		gc_mark(objspace, (VALUE)obj->as.node.u1.node);
1805 1899
	    }
1806 1900
	    if (is_pointer_to_heap(objspace, obj->as.node.u2.node)) {
1807
		gc_mark(objspace, (VALUE)obj->as.node.u2.node, lev);
1901
		gc_mark(objspace, (VALUE)obj->as.node.u2.node);
1808 1902
	    }
1809 1903
	    if (is_pointer_to_heap(objspace, obj->as.node.u3.node)) {
1810
		gc_mark(objspace, (VALUE)obj->as.node.u3.node, lev);
1904
		gc_mark(objspace, (VALUE)obj->as.node.u3.node);
1811 1905
	    }
1812 1906
	}
1813 1907
	return;			/* no need to mark class. */
1814 1908
    }
1815 1909

  
1816
    gc_mark(objspace, obj->as.basic.klass, lev);
1910
    gc_mark(objspace, obj->as.basic.klass);
1817 1911
    switch (BUILTIN_TYPE(obj)) {
1818 1912
      case T_ICLASS:
1819 1913
      case T_CLASS:
1820 1914
      case T_MODULE:
1821
	mark_m_tbl(objspace, RCLASS_M_TBL(obj), lev);
1822
	mark_tbl(objspace, RCLASS_IV_TBL(obj), lev);
1823
	mark_const_tbl(objspace, RCLASS_CONST_TBL(obj), lev);
1915
	mark_m_tbl(objspace, RCLASS_M_TBL(obj));
1916
	mark_tbl(objspace, RCLASS_IV_TBL(obj));
1917
	mark_const_tbl(objspace, RCLASS_CONST_TBL(obj));
1824 1918
	ptr = RCLASS_SUPER(obj);
1825 1919
	goto again;
1826 1920

  
......
1833 1927
	    long i, len = RARRAY_LEN(obj);
1834 1928
	    VALUE *ptr = RARRAY_PTR(obj);
1835 1929
	    for (i=0; i < len; i++) {
1836
		gc_mark(objspace, *ptr++, lev);
1930
		gc_mark(objspace, *ptr++);
1837 1931
	    }
1838 1932
	}
1839 1933
	break;
1840 1934

  
1841 1935
      case T_HASH:
1842
	mark_hash(objspace, obj->as.hash.ntbl, lev);
1936
	mark_hash(objspace, obj->as.hash.ntbl);
1843 1937
	ptr = obj->as.hash.ifnone;
1844 1938
	goto again;
1845 1939

  
......
1866 1960
            long i, len = ROBJECT_NUMIV(obj);
1867 1961
	    VALUE *ptr = ROBJECT_IVPTR(obj);
1868 1962
            for (i  = 0; i < len; i++) {
1869
		gc_mark(objspace, *ptr++, lev);
1963
		gc_mark(objspace, *ptr++);
1870 1964
            }
1871 1965
        }
1872 1966
	break;
1873 1967

  
1874 1968
      case T_FILE:
1875 1969
        if (obj->as.file.fptr) {
1876
            gc_mark(objspace, obj->as.file.fptr->pathv, lev);
1877
            gc_mark(objspace, obj->as.file.fptr->tied_io_for_writing, lev);
1878
            gc_mark(objspace, obj->as.file.fptr->writeconv_asciicompat, lev);
1879
            gc_mark(objspace, obj->as.file.fptr->writeconv_pre_ecopts, lev);
1880
            gc_mark(objspace, obj->as.file.fptr->encs.ecopts, lev);
1881
            gc_mark(objspace, obj->as.file.fptr->write_lock, lev);
1970
            gc_mark(objspace, obj->as.file.fptr->pathv);
1971
            gc_mark(objspace, obj->as.file.fptr->tied_io_for_writing);
1972
            gc_mark(objspace, obj->as.file.fptr->writeconv_asciicompat);
1973
            gc_mark(objspace, obj->as.file.fptr->writeconv_pre_ecopts);
1974
            gc_mark(objspace, obj->as.file.fptr->encs.ecopts);
1975
            gc_mark(objspace, obj->as.file.fptr->write_lock);
1882 1976
        }
1883 1977
        break;
1884 1978

  
1885 1979
      case T_REGEXP:
1886
        gc_mark(objspace, obj->as.regexp.src, lev);
1887
        break;
1980
        ptr = obj->as.regexp.src;
1981
        goto again;
1888 1982

  
1889 1983
      case T_FLOAT:
1890 1984
      case T_BIGNUM:
......
1892 1986
	break;
1893 1987

  
1894 1988
      case T_MATCH:
1895
	gc_mark(objspace, obj->as.match.regexp, lev);
1989
	gc_mark(objspace, obj->as.match.regexp);
1896 1990
	if (obj->as.match.str) {
1897 1991
	    ptr = obj->as.match.str;
1898 1992
	    goto again;
......
1900 1994
	break;
1901 1995

  
1902 1996
      case T_RATIONAL:
1903
	gc_mark(objspace, obj->as.rational.num, lev);
1904
	gc_mark(objspace, obj->as.rational.den, lev);
1905
	break;
1997
	gc_mark(objspace, obj->as.rational.num);
1998
	ptr = obj->as.rational.den;
1999
	goto again;
1906 2000

  
1907 2001
      case T_COMPLEX:
1908
	gc_mark(objspace, obj->as.complex.real, lev);
1909
	gc_mark(objspace, obj->as.complex.imag, lev);
1910
	break;
2002
	gc_mark(objspace, obj->as.complex.real);
2003
	ptr = obj->as.complex.imag;
2004
	goto again;
1911 2005

  
1912 2006
      case T_STRUCT:
1913 2007
	{
......
1915 2009
	    VALUE *ptr = RSTRUCT_PTR(obj);
1916 2010

  
1917 2011
	    while (len--) {
1918
		gc_mark(objspace, *ptr++, lev);
2012
		gc_mark(objspace, *ptr++);
1919 2013
	    }
1920 2014
	}
1921 2015
	break;
......
2441 2535

  
2442 2536
    SET_STACK_END;
2443 2537

  
2444
    init_mark_stack(objspace);
2445

  
2446 2538
    th->vm->self ? rb_gc_mark(th->vm->self) : rb_vm_mark(th->vm);
2447 2539

  
2448
    mark_tbl(objspace, finalizer_table, 0);
2540
    mark_tbl(objspace, finalizer_table);
2449 2541
    mark_current_machine_context(objspace, th);
2450 2542

  
2451 2543
    rb_gc_mark_symbols();
......
2458 2550
    rb_mark_end_proc();
2459 2551
    rb_gc_mark_global_tbl();
2460 2552

  
2461
    mark_tbl(objspace, rb_class_tbl, 0);
2553
    mark_tbl(objspace, rb_class_tbl);
2462 2554

  
2463 2555
    /* mark generic instance variables for special constants */
2464 2556
    rb_mark_generic_ivar_tbl();
......
2467 2559

  
2468 2560
    rb_gc_mark_unlinked_live_method_entries(th->vm);
2469 2561

  
2470
    /* gc_mark objects whose marking are not completed*/
2471
    while (!MARK_STACK_EMPTY) {
2472
	if (mark_stack_overflow) {
2473
	    gc_mark_all(objspace);
2474
	}
2475
	else {
2476
	    gc_mark_rest(objspace);
2477
	}
2478
    }
2562
    /* marking-loop */
2563
    gc_mark_stacked_objects(objspace);
2564

  
2479 2565
    GC_PROF_MARK_TIMER_STOP;
2480 2566
}
2481 2567

  
......
3008 3094
	/* XXX: this loop will make no sense */
3009 3095
	/* because mark will not be removed */
3010 3096
	finalize_deferred(objspace);
3011
	mark_tbl(objspace, finalizer_table, 0);
3097
	mark_tbl(objspace, finalizer_table);
3098
	gc_mark_stacked_objects(objspace);
3012 3099
	st_foreach(finalizer_table, chain_finalized_object,
3013 3100
		   (st_data_t)&deferred_final_list);
3014 3101
    } while (deferred_final_list);
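
As a self-contained illustration of the chunked-stack technique the gc.c hunks above introduce, the toy program below (hypothetical names, not Ruby code) implements the same push/pop-across-chunk-boundaries logic as push_mark_stack and pop_mark_stack. The Ruby version additionally keeps a small cache of spare chunks (add_stack_chunk_cache / shrink_stack_chunk_cache) to avoid malloc/free churn between GC runs; this sketch simply frees a drained chunk.

/* Toy model of a chunked LIFO stack; compile with: cc demo.c && ./a.out */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define CHUNK_SIZE 4                      /* tiny, so chunk switching is visible */

typedef struct chunk {
    uintptr_t data[CHUNK_SIZE];
    struct chunk *next;
} chunk_t;

typedef struct {
    chunk_t *chunk;                       /* topmost chunk, or NULL if empty */
    size_t index;                         /* next free slot in the top chunk */
} cstack_t;

static void
push(cstack_t *s, uintptr_t v)
{
    if (!s->chunk || s->index == CHUNK_SIZE) {
        chunk_t *c = malloc(sizeof(chunk_t));
        if (!c) abort();
        c->next = s->chunk;               /* new chunk becomes the top */
        s->chunk = c;
        s->index = 0;
    }
    s->chunk->data[s->index++] = v;
}

static int
pop(cstack_t *s, uintptr_t *v)
{
    if (!s->chunk) return 0;              /* stack is empty */
    *v = s->chunk->data[--s->index];
    if (s->index == 0) {                  /* top chunk drained: drop it */
        chunk_t *old = s->chunk;
        s->chunk = old->next;
        s->index = s->chunk ? CHUNK_SIZE : 0;
        free(old);                        /* the patch caches it instead */
    }
    return 1;
}

int
main(void)
{
    cstack_t s = {0, 0};
    uintptr_t v;

    for (v = 1; v <= 10; v++) push(&s, v);
    while (pop(&s, &v)) printf("%lu ", (unsigned long)v);
    printf("\n");                         /* prints: 10 9 8 ... 1 */
    return 0;
}
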
win32/Makefile.sub
595 595
#define GETGROUPS_T int
596 596
#define RETSIGTYPE void
597 597
#define TYPEOF_TIMEVAL_TV_SEC long
598
#define GC_MARK_STACKFRAME_WORD 30
599 598
#define HAVE_ALLOCA 1
600 599
#define HAVE_DUP2 1
601 600
#define HAVE_MEMCMP 1