/cvs/libev/ev.c

Comparing libev/ev.c (file contents):
Revision 1.230 by root, Fri May 2 08:13:16 2008 UTC vs.
Revision 1.239 by root, Thu May 8 20:52:13 2008 UTC

@@ 1.230:520 1.239:520 @@
     }
 }
 
 /*****************************************************************************/
 
+#define MALLOC_ROUND 4096 /* prefer to allocate in chunks of this size, must be 2**n and >> 4 longs */
+
 int inline_size
 array_nextsize (int elem, int cur, int cnt)
 {
   int ncur = cur + 1;
 
   do
     ncur <<= 1;
   while (cnt > ncur);
 
-  /* if size > 4096, round to 4096 - 4 * longs to accomodate malloc overhead */
-  if (elem * ncur > 4096)
+  /* if size is large, round to MALLOC_ROUND - 4 * longs to accomodate malloc overhead */
+  if (elem * ncur > MALLOC_ROUND - sizeof (void *) * 4)
     {
       ncur *= elem;
-      ncur = (ncur + elem + 4095 + sizeof (void *) * 4) & ~4095;
+      ncur = (ncur + elem + (MALLOC_ROUND - 1) + sizeof (void *) * 4) & ~(MALLOC_ROUND - 1);
       ncur = ncur - sizeof (void *) * 4;
       ncur /= elem;
     }
 
   return ncur;
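
The rounding branch above aims to make the byte size of the array, plus an assumed malloc bookkeeping overhead of four pointers, land on (or just under) a MALLOC_ROUND boundary. A minimal standalone sketch of just that arithmetic (an illustration, not part of ev.c; the element size and count are invented):

    #include <stdio.h>

    #define MALLOC_ROUND 4096 /* same constant as above */

    /* reproduces only the rounding branch of array_nextsize */
    static int
    round_nextsize (int elem, int ncur)
    {
      ncur *= elem;                                     /* bytes wanted */
      ncur = (ncur + elem + (MALLOC_ROUND - 1) + sizeof (void *) * 4)
             & ~(MALLOC_ROUND - 1);                     /* round up to a chunk boundary */
      ncur -= sizeof (void *) * 4;                      /* leave room for the assumed malloc header */
      return ncur / elem;                               /* back to an element count */
    }

    int
    main (void)
    {
      int n = round_nextsize (16, 512);                 /* e.g. 16-byte elements, 512 requested */

      printf ("%d elements = %lu bytes; plus assumed overhead = %lu (a multiple of %d)\n",
              n, (unsigned long)(n * 16),
              (unsigned long)(n * 16 + sizeof (void *) * 4), MALLOC_ROUND);
      return 0;
    }

With 8-byte pointers this prints 766 elements and 12256 bytes, i.e. exactly three 4096-byte chunks once the guessed 32-byte overhead is added back.
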
@@ 1.230:757 1.239:759 @@
       }
 }
 
 /*****************************************************************************/
 
+/*
+ * at the moment we allow libev the luxury of two heaps,
+ * a small-code-size 2-heap one and a ~1.5kb larger 4-heap
+ * which is more cache-efficient.
+ * the difference is about 5% with 50000+ watchers.
+ */
+#define USE_4HEAP !EV_MINIMAL
+#if USE_4HEAP
+
+#define DHEAP 4
+#define HEAP0 (DHEAP - 1) /* index of first element in heap */
+
 /* towards the root */
 void inline_speed
 upheap (WT *heap, int k)
 {
   WT w = heap [k];
 
   for (;;)
     {
+      int p = ((k - HEAP0 - 1) / DHEAP) + HEAP0;
+
+      if (p == k || heap [p]->at <= w->at)
+        break;
+
+      heap [k] = heap [p];
+      ev_active (heap [k]) = k;
+      k = p;
+    }
+
+  heap [k] = w;
+  ev_active (heap [k]) = k;
+}
+
+/* away from the root */
+void inline_speed
+downheap (WT *heap, int N, int k)
+{
+  WT w = heap [k];
+  WT *E = heap + N + HEAP0;
+
+  for (;;)
+    {
+      ev_tstamp minat;
+      WT *minpos;
+      WT *pos = heap + DHEAP * (k - HEAP0) + HEAP0;
+
+      // find minimum child
+      if (expect_true (pos + DHEAP - 1 < E))
+        {
+          /* fast path */
+          (minpos = pos + 0), (minat = (*minpos)->at);
+          if (pos [1]->at < minat) (minpos = pos + 1), (minat = (*minpos)->at);
+          if (pos [2]->at < minat) (minpos = pos + 2), (minat = (*minpos)->at);
+          if (pos [3]->at < minat) (minpos = pos + 3), (minat = (*minpos)->at);
+        }
+      else
+        {
+          /* slow path */
+          if (pos >= E)
+            break;
+          (minpos = pos + 0), (minat = (*minpos)->at);
+          if (pos + 1 < E && pos [1]->at < minat) (minpos = pos + 1), (minat = (*minpos)->at);
+          if (pos + 2 < E && pos [2]->at < minat) (minpos = pos + 2), (minat = (*minpos)->at);
+          if (pos + 3 < E && pos [3]->at < minat) (minpos = pos + 3), (minat = (*minpos)->at);
+        }
+
+      if (w->at <= minat)
+        break;
+
+      ev_active (*minpos) = k;
+      heap [k] = *minpos;
+
+      k = minpos - heap;
+    }
+
+  heap [k] = w;
+  ev_active (heap [k]) = k;
+}
+
+#else // 4HEAP
+
+#define HEAP0 1
+
+/* towards the root */
+void inline_speed
+upheap (WT *heap, int k)
+{
+  WT w = heap [k];
+
+  for (;;)
+    {
       int p = k >> 1;
 
       /* maybe we could use a dummy element at heap [0]? */
       if (!p || heap [p]->at <= w->at)
         break;
 
@@ 1.230:793 1.239:879 @@
       int c = k << 1;
 
       if (c > N)
         break;
 
-      c += c < N && heap [c]->at > heap [c + 1]->at
+      c += c + 1 < N && heap [c]->at > heap [c + 1]->at
          ? 1 : 0;
 
       if (w->at <= heap [c]->at)
         break;
 
       heap [k] = heap [c];
-      ev_active (heap [k]) = k;
+      ((W)heap [k])->active = k;
 
       k = c;
     }
 
   heap [k] = w;
   ev_active (heap [k]) = k;
 }
+#endif
 
 void inline_size
 adjustheap (WT *heap, int N, int k)
 {
   upheap (heap, k);
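
In the 4-heap variant the root lives at array slot HEAP0 = DHEAP - 1 rather than at slot 1, so the four children of a node occupy adjacent slots. The following throwaway program (an illustration only, not part of ev.c) just evaluates the parent and first-child expressions used in the added code for the first few slots, so the layout they imply can be inspected by eye:

    #include <stdio.h>

    #define DHEAP 4
    #define HEAP0 (DHEAP - 1) /* index of first element in heap */

    int
    main (void)
    {
      int k;

      /* print, for each of the first few heap slots, where the code above
         would look for its parent and for its four children */
      for (k = HEAP0; k < HEAP0 + 12; ++k)
        {
          int parent = ((k - HEAP0 - 1) / DHEAP) + HEAP0; /* as in the 4-heap upheap */
          int child0 = DHEAP * (k - HEAP0) + HEAP0;       /* as in the 4-heap downheap */

          printf ("slot %2d: parent slot %2d, child slots %2d..%2d\n",
                  k, parent, child0, child0 + DHEAP - 1);
        }

      return 0;
    }

Note that for the root slot the parent expression evaluates to the root itself (C integer division of -1 by DHEAP truncates to 0), which is exactly what the "p == k" termination test in the 4-heap upheap relies on.
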
@@ 1.230:912 1.239:999 @@
 pipecb (EV_P_ ev_io *iow, int revents)
 {
 #if EV_USE_EVENTFD
   if (evfd >= 0)
     {
-      uint64_t counter = 1;
+      uint64_t counter;
       read (evfd, &counter, sizeof (uint64_t));
     }
   else
 #endif
     {
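
The dropped initializer was dead weight: read () overwrites counter when it succeeds, and the value is discarded anyway; the read only serves to drain and reset the eventfd used for wakeups. For reference, a minimal standalone sketch of that eventfd pattern (an illustration, not part of ev.c; Linux-specific):

    #include <stdint.h>
    #include <stdio.h>
    #include <unistd.h>
    #include <sys/eventfd.h>

    int
    main (void)
    {
      int evfd = eventfd (0, 0); /* counter starts at 0 */
      uint64_t one = 1, counter;

      if (evfd < 0)
        return 1;

      /* two wakeups in a row only bump the counter... */
      write (evfd, &one, sizeof (one));
      write (evfd, &one, sizeof (one));

      /* ...and a single 8-byte read drains them all and resets the counter */
      read (evfd, &counter, sizeof (counter));
      printf ("drained counter = %llu\n", (unsigned long long)counter); /* prints 2 */

      close (evfd);
      return 0;
    }
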
@@ 1.230:1368 1.239:1455 @@
 void
 ev_loop_fork (EV_P)
 {
   postfork = 1; /* must be in line with ev_default_fork */
 }
-
 #endif
 
 #if EV_MULTIPLICITY
 struct ev_loop *
 ev_default_loop_init (unsigned int flags)
 
@@ 1.230:1459 1.239:1545 @@
             EV_CB_INVOKE (p->w, p->events);
           }
       }
 }
 
+#if EV_IDLE_ENABLE
+void inline_size
+idle_reify (EV_P)
+{
+  if (expect_false (idleall))
+    {
+      int pri;
+
+      for (pri = NUMPRI; pri--; )
+        {
+          if (pendingcnt [pri])
+            break;
+
+          if (idlecnt [pri])
+            {
+              queue_events (EV_A_ (W *)idles [pri], idlecnt [pri], EV_IDLE);
+              break;
+            }
+        }
+    }
+}
+#endif
+
 void inline_size
 timers_reify (EV_P)
 {
-  while (timercnt && ev_at (timers [1]) <= mn_now)
+  while (timercnt && ev_at (timers [HEAP0]) <= mn_now)
     {
-      ev_timer *w = (ev_timer *)timers [1];
+      ev_timer *w = (ev_timer *)timers [HEAP0];
 
       /*assert (("inactive timer on timer heap detected", ev_is_active (w)));*/
 
       /* first reschedule or stop timer */
       if (w->repeat)
 
@@ 1.230:1477 1.239:1586 @@
 
           ev_at (w) += w->repeat;
           if (ev_at (w) < mn_now)
             ev_at (w) = mn_now;
 
-          downheap (timers, timercnt, 1);
+          downheap (timers, timercnt, HEAP0);
         }
       else
         ev_timer_stop (EV_A_ w); /* nonrepeating: stop timer */
 
       ev_feed_event (EV_A_ (W)w, EV_TIMEOUT);
 
@@ 1.230:1490 1.239:1599 @@
 
 #if EV_PERIODIC_ENABLE
 void inline_size
 periodics_reify (EV_P)
 {
-  while (periodiccnt && ev_at (periodics [1]) <= ev_rt_now)
+  while (periodiccnt && ev_at (periodics [HEAP0]) <= ev_rt_now)
     {
-      ev_periodic *w = (ev_periodic *)periodics [1];
+      ev_periodic *w = (ev_periodic *)periodics [HEAP0];
 
       /*assert (("inactive timer on periodic heap detected", ev_is_active (w)));*/
 
       /* first reschedule or stop timer */
       if (w->reschedule_cb)
 
@@ 1.230:1508 1.239:1617 @@
       else if (w->interval)
         {
           ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval;
           if (ev_at (w) - ev_rt_now <= TIME_EPSILON) ev_at (w) += w->interval;
           assert (("ev_periodic timeout in the past detected while processing timers, negative interval?", ev_at (w) > ev_rt_now));
-          downheap (periodics, periodiccnt, 1);
+          downheap (periodics, periodiccnt, HEAP0);
         }
       else
         ev_periodic_stop (EV_A_ w); /* nonrepeating: stop timer */
 
       ev_feed_event (EV_A_ (W)w, EV_PERIODIC);
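
The interval branch above computes the next multiple of the interval, measured from the watcher's offset, that lies at or after ev_rt_now; the TIME_EPSILON test then nudges it one interval further if rounding left it (numerically) on "now". A worked example as a standalone sketch (an illustration, not part of ev.c; the timestamps and the epsilon value are invented, and it needs -lm to link):

    #include <math.h>
    #include <stdio.h>

    #define TIME_EPSILON 1e-4 /* stand-in threshold; ev.c defines its own small constant */

    int
    main (void)
    {
      double offset   = 0.;           /* "align to multiples of interval since the epoch" */
      double interval = 3600.;        /* e.g. once per hour */
      double now      = 1210000123.5; /* some wall-clock time */

      /* next multiple of interval (relative to offset) at or after now */
      double at = offset + ceil ((now - offset) / interval) * interval;

      if (at - now <= TIME_EPSILON)
        at += interval;               /* landed on "now": schedule the following occurrence */

      printf ("now = %.1f, next occurrence at %.1f (in %.1f seconds)\n", now, at, at - now);
      return 0;
    }
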
@@ 1.230:1523 1.239:1632 @@
 periodics_reschedule (EV_P)
 {
   int i;
 
   /* adjust periodics after time jump */
-  for (i = 0; i < periodiccnt; ++i)
+  for (i = 1; i <= periodiccnt; ++i)
     {
       ev_periodic *w = (ev_periodic *)periodics [i];
 
       if (w->reschedule_cb)
         ev_at (w) = w->reschedule_cb (w, ev_rt_now);
       else if (w->interval)
         ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval;
     }
 
   /* now rebuild the heap */
-  for (i = periodiccnt >> 1; i--; )
-    downheap (periodics, periodiccnt, i);
-}
-#endif
-
-#if EV_IDLE_ENABLE
-void inline_size
-idle_reify (EV_P)
-{
-  if (expect_false (idleall))
-    {
-      int pri;
-
-      for (pri = NUMPRI; pri--; )
-        {
-          if (pendingcnt [pri])
-            break;
-
-          if (idlecnt [pri])
-            {
-              queue_events (EV_A_ (W *)idles [pri], idlecnt [pri], EV_IDLE);
-              break;
-            }
-        }
-    }
+  for (i = periodiccnt >> 1; --i; )
+    downheap (periodics, periodiccnt, i + HEAP0);
 }
 #endif
 
 void inline_speed
 time_update (EV_P_ ev_tstamp max_block)
 
@@ 1.230:1597 1.239:1683 @@
        */
       for (i = 4; --i; )
         {
           rtmn_diff = ev_rt_now - mn_now;
 
-          if (fabs (odiff - rtmn_diff) < MIN_TIMEJUMP)
+          if (expect_true (fabs (odiff - rtmn_diff) < MIN_TIMEJUMP))
             return; /* all is well */
 
           ev_rt_now = ev_time ();
           mn_now = get_clock ();
           now_floor = mn_now;
 
@@ 1.230:1703 1.239:1789 @@
 
             waittime = MAX_BLOCKTIME;
 
             if (timercnt)
               {
-                ev_tstamp to = ev_at (timers [1]) - mn_now + backend_fudge;
+                ev_tstamp to = ev_at (timers [HEAP0]) - mn_now + backend_fudge;
                 if (waittime > to) waittime = to;
               }
 
 #if EV_PERIODIC_ENABLE
             if (periodiccnt)
               {
-                ev_tstamp to = ev_at (periodics [1]) - ev_rt_now + backend_fudge;
+                ev_tstamp to = ev_at (periodics [HEAP0]) - ev_rt_now + backend_fudge;
                 if (waittime > to) waittime = to;
               }
 #endif
 
             if (expect_false (waittime < timeout_blocktime))
@@ 1.230:1890 1.239:1976 @@
 
   ev_at (w) += mn_now;
 
   assert (("ev_timer_start called with negative timer repeat value", w->repeat >= 0.));
 
-  ev_start (EV_A_ (W)w, ++timercnt);
-  array_needsize (WT, timers, timermax, timercnt + 1, EMPTY2);
-  timers [timercnt] = (WT)w;
-  upheap (timers, timercnt);
+  ev_start (EV_A_ (W)w, ++timercnt + HEAP0 - 1);
+  array_needsize (WT, timers, timermax, timercnt + HEAP0, EMPTY2);
+  timers [ev_active (w)] = (WT)w;
+  upheap (timers, ev_active (w));
 
   /*assert (("internal timer heap corruption", timers [ev_active (w)] == w));*/
 }
 
 void noinline
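
Because the heap now starts at array slot HEAP0 instead of 1, a watcher's heap slot (kept in ev_active) and the count of active watchers differ by HEAP0 - 1: N timers occupy slots HEAP0 .. N + HEAP0 - 1, which is where the "+ HEAP0 - 1" terms above (and in ev_timer_stop below) come from. A standalone sketch of just that bookkeeping, with a made-up watcher struct (an illustration, not part of ev.c; heap ordering is deliberately left out):

    #include <assert.h>
    #include <stdio.h>

    #define HEAP0 3 /* as in the 4-heap case above */

    typedef struct { int active; } fake_watcher; /* hypothetical stand-in for ev_timer */

    int
    main (void)
    {
      fake_watcher pool [8];
      fake_watcher *timers [16];
      int timercnt = 0;
      int i;

      for (i = 0; i < 5; ++i)
        {
          fake_watcher *w = &pool [i];

          w->active = ++timercnt + HEAP0 - 1; /* slot assigned at start */
          timers [w->active] = w;
        }

      assert (timers [HEAP0]->active == HEAP0);            /* root slot */
      assert (timers [timercnt + HEAP0 - 1] == &pool [4]); /* last occupied slot */

      printf ("%d watchers in slots %d..%d\n", timercnt, HEAP0, timercnt + HEAP0 - 1);
      return 0;
    }
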
@@ 1.230:1910 1.239:1996 @@
   {
     int active = ev_active (w);
 
     assert (("internal timer heap corruption", timers [active] == (WT)w));
 
-    if (expect_true (active < timercnt))
+    if (expect_true (active < timercnt + HEAP0 - 1))
       {
-        timers [active] = timers [timercnt];
+        timers [active] = timers [timercnt + HEAP0 - 1];
         adjustheap (timers, timercnt, active);
       }
 
     --timercnt;
   }
 
@@ 1.230:1962 1.239:2048 @@
       ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval;
     }
   else
     ev_at (w) = w->offset;
 
-  ev_start (EV_A_ (W)w, ++periodiccnt);
-  array_needsize (WT, periodics, periodicmax, periodiccnt + 1, EMPTY2);
-  periodics [periodiccnt] = (WT)w;
-  upheap (periodics, periodiccnt);
+  ev_start (EV_A_ (W)w, ++periodiccnt + HEAP0 - 1);
+  array_needsize (WT, periodics, periodicmax, periodiccnt + HEAP0, EMPTY2);
+  periodics [ev_active (w)] = (WT)w;
+  upheap (periodics, ev_active (w));
 
   /*assert (("internal periodic heap corruption", periodics [ev_active (w)] == w));*/
 }
 
 void noinline
 
@@ 1.230:1982 1.239:2068 @@
   {
     int active = ev_active (w);
 
     assert (("internal periodic heap corruption", periodics [active] == (WT)w));
 
-    if (expect_true (active < periodiccnt))
+    if (expect_true (active < periodiccnt + HEAP0 - 1))
       {
-        periodics [active] = periodics [periodiccnt];
+        periodics [active] = periodics [periodiccnt + HEAP0 - 1];
         adjustheap (periodics, periodiccnt, active);
       }
 
     --periodiccnt;
   }
 
@@ 1.230:2114 1.239:2200 @@
   if (w->wd < 0)
     {
       ev_timer_start (EV_A_ &w->timer); /* this is not race-free, so we still need to recheck periodically */
 
       /* monitor some parent directory for speedup hints */
+      /* note that exceeding the hardcoded limit is not a correctness issue, */
+      /* but an efficiency issue only */
      if ((errno == ENOENT || errno == EACCES) && strlen (w->path) < 4096)
         {
           char path [4096];
           strcpy (path, w->path);
 

Diff Legend

Each "@@ 1.230:N 1.239:M @@" header gives the line at which the hunk below it starts in revision 1.230 and in revision 1.239.

-  line removed in 1.239
+  line added in 1.239
   (space-prefixed lines are unchanged context; a changed line appears as the removed 1.230 line followed by its 1.239 replacement)