/cvs/libev/ev.c

Comparing libev/ev.c (file contents):
Revision 1.231 by root, Mon May 5 20:47:33 2008 UTC vs.
Revision 1.242 by root, Fri May 9 14:07:19 2008 UTC

422 W w; 422 W w;
423 int events; 423 int events;
424} ANPENDING; 424} ANPENDING;
425 425
426#if EV_USE_INOTIFY 426#if EV_USE_INOTIFY
427/* hash table entry per inotify-id */
427typedef struct 428typedef struct
428{ 429{
429 WL head; 430 WL head;
430} ANFS; 431} ANFS;
432#endif
433
434/* Heap Entry */
435#define EV_HEAP_CACHE_AT 0
436#if EV_HEAP_CACHE_AT
437 typedef struct {
438 WT w;
439 ev_tstamp at;
440 } ANHE;
441
442 #define ANHE_w(he) (he).w /* access watcher, read-write */
443 #define ANHE_at(he) (he).at /* access cached at, read-only */
444 #define ANHE_at_set(he) (he).at = (he).w->at /* update at from watcher */
445#else
446 typedef WT ANHE;
447
448 #define ANHE_w(he) (he)
449 #define ANHE_at(he) (he)->at
450 #define ANHE_at_set(he)
431#endif 451#endif
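
The EV_HEAP_CACHE_AT variant above trades a larger heap entry for comparisons that never leave the heap array. A minimal standalone sketch of that idea (simplified types and names, not the libev structs):

  #include <stdio.h>

  typedef double ev_tstamp;

  typedef struct watcher { ev_tstamp at; } watcher;

  /* cached variant: the timestamp sits next to the watcher pointer, so a
     heap comparison reads one contiguous array element instead of chasing
     the watcher pointer */
  typedef struct { watcher *w; ev_tstamp at; } heap_entry;

  static int earlier (heap_entry a, heap_entry b)
  {
    return a.at < b.at; /* no pointer dereference on the hot path */
  }

  int main (void)
  {
    watcher w1 = { 1.5 }, w2 = { 0.5 };
    heap_entry a = { &w1, w1.at }, b = { &w2, w2.at };

    printf ("%d\n", earlier (a, b)); /* prints 0: w2 expires first */
    return 0;
  }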
432 452
433#if EV_MULTIPLICITY 453#if EV_MULTIPLICITY
434 454
435 struct ev_loop 455 struct ev_loop
520 } 540 }
521} 541}
522 542
523/*****************************************************************************/ 543/*****************************************************************************/
524 544
545#define MALLOC_ROUND 4096 /* prefer to allocate in chunks of this size, must be 2**n and >> 4 longs */
546
525int inline_size 547int inline_size
526array_nextsize (int elem, int cur, int cnt) 548array_nextsize (int elem, int cur, int cnt)
527{ 549{
528 int ncur = cur + 1; 550 int ncur = cur + 1;
529 551
530 do 552 do
531 ncur <<= 1; 553 ncur <<= 1;
532 while (cnt > ncur); 554 while (cnt > ncur);
533 555
534 /* if size > 4096, round to 4096 - 4 * longs to accommodate malloc overhead */ 556 /* if size is large, round to MALLOC_ROUND - 4 * longs to accommodate malloc overhead */
535 if (elem * ncur > 4096) 557 if (elem * ncur > MALLOC_ROUND - sizeof (void *) * 4)
536 { 558 {
537 ncur *= elem; 559 ncur *= elem;
538 ncur = (ncur + elem + 4095 + sizeof (void *) * 4) & ~4095; 560 ncur = (ncur + elem + (MALLOC_ROUND - 1) + sizeof (void *) * 4) & ~(MALLOC_ROUND - 1);
539 ncur = ncur - sizeof (void *) * 4; 561 ncur = ncur - sizeof (void *) * 4;
540 ncur /= elem; 562 ncur /= elem;
541 } 563 }
542 564
543 return ncur; 565 return ncur;
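
A standalone illustration of the rounding arithmetic above (the function body mirrors array_nextsize; the element size, counts and 8-byte pointers are assumed for the example):

  #include <stdio.h>

  #define MALLOC_ROUND 4096

  static int next_size (int elem, int cur, int cnt)
  {
    int ncur = cur + 1;

    do
      ncur <<= 1;
    while (cnt > ncur);

    /* same rounding as above: pad up to a multiple of MALLOC_ROUND, then
       give back four pointers' worth for the allocator's bookkeeping */
    if (elem * ncur > MALLOC_ROUND - sizeof (void *) * 4)
      {
        ncur *= elem;
        ncur = (ncur + elem + (MALLOC_ROUND - 1) + sizeof (void *) * 4) & ~(MALLOC_ROUND - 1);
        ncur = ncur - sizeof (void *) * 4;
        ncur /= elem;
      }

    return ncur;
  }

  int main (void)
  {
    printf ("%d\n", next_size (16, 300, 301)); /* prints 766 with 8-byte pointers */
    return 0;
  }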
757 } 779 }
758} 780}
759 781
760/*****************************************************************************/ 782/*****************************************************************************/
761 783
784/*
 785 * the heap functions want a real array index. array index 0 is guaranteed not to
 786 * be in use at any time. the first heap entry is at array [HEAP0]. DHEAP gives
787 * the branching factor of the d-tree.
788 */
789
790/*
791 * at the moment we allow libev the luxury of two heaps,
792 * a small-code-size 2-heap one and a ~1.5kb larger 4-heap
793 * which is more cache-efficient.
794 * the difference is about 5% with 50000+ watchers.
795 */
796#define EV_USE_4HEAP !EV_MINIMAL
797#if EV_USE_4HEAP
798
799#define DHEAP 4
800#define HEAP0 (DHEAP - 1) /* index of first element in heap */
801
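
A small standalone check of the index arithmetic implied by DHEAP and HEAP0 (the formulas mirror the upheap/downheap code below):

  #include <stdio.h>

  #define DHEAP 4
  #define HEAP0 (DHEAP - 1)

  static int parent (int k) { return ((k - HEAP0 - 1) / DHEAP) + HEAP0; }
  static int child0 (int k) { return DHEAP * (k - HEAP0) + HEAP0 + 1; }

  int main (void)
  {
    /* the root lives at HEAP0 == 3; its children are 4..7, theirs are 8..23 */
    printf ("%d %d %d\n", child0 (HEAP0), parent (4), parent (9));
    /* prints: 4 3 4 */
    return 0;
  }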
762/* towards the root */ 802/* towards the root */
763void inline_speed 803void inline_speed
764upheap (WT *heap, int k) 804upheap (ANHE *heap, int k)
765{ 805{
766 WT w = heap [k]; 806 ANHE he = heap [k];
767 807
768 for (;;) 808 for (;;)
769 { 809 {
770 int p = k >> 1; 810 int p = ((k - HEAP0 - 1) / DHEAP) + HEAP0;
771 811
772 /* maybe we could use a dummy element at heap [0]? */ 812 if (p == k || ANHE_at (heap [p]) <= ANHE_at (he))
773 if (!p || heap [p]->at <= w->at)
774 break; 813 break;
775 814
776 heap [k] = heap [p]; 815 heap [k] = heap [p];
777 ev_active (heap [k]) = k; 816 ev_active (ANHE_w (heap [k])) = k;
778 k = p; 817 k = p;
779 } 818 }
780 819
820 ev_active (ANHE_w (he)) = k;
781 heap [k] = w; 821 heap [k] = he;
782 ev_active (heap [k]) = k;
783} 822}
784 823
785/* away from the root */ 824/* away from the root */
786void inline_speed 825void inline_speed
787downheap (WT *heap, int N, int k) 826downheap (ANHE *heap, int N, int k)
788{ 827{
789 WT w = heap [k]; 828 ANHE he = heap [k];
829 ANHE *E = heap + N + HEAP0;
830
831 for (;;)
832 {
833 ev_tstamp minat;
834 ANHE *minpos;
 835 ANHE *pos = heap + DHEAP * (k - HEAP0) + HEAP0 + 1; /* first child of k */
836
837 // find minimum child
838 if (expect_true (pos + DHEAP - 1 < E))
839 {
840 /* fast path */ (minpos = pos + 0), (minat = ANHE_at (*minpos));
841 if (ANHE_at (pos [1]) < minat) (minpos = pos + 1), (minat = ANHE_at (*minpos));
842 if (ANHE_at (pos [2]) < minat) (minpos = pos + 2), (minat = ANHE_at (*minpos));
843 if (ANHE_at (pos [3]) < minat) (minpos = pos + 3), (minat = ANHE_at (*minpos));
844 }
845 else if (pos < E)
846 {
847 /* slow path */ (minpos = pos + 0), (minat = ANHE_at (*minpos));
848 if (pos + 1 < E && ANHE_at (pos [1]) < minat) (minpos = pos + 1), (minat = ANHE_at (*minpos));
849 if (pos + 2 < E && ANHE_at (pos [2]) < minat) (minpos = pos + 2), (minat = ANHE_at (*minpos));
850 if (pos + 3 < E && ANHE_at (pos [3]) < minat) (minpos = pos + 3), (minat = ANHE_at (*minpos));
851 }
852 else
853 break;
854
855 if (ANHE_at (he) <= minat)
856 break;
857
858 ev_active (ANHE_w (*minpos)) = k;
859 heap [k] = *minpos;
860
861 k = minpos - heap;
862 }
863
864 ev_active (ANHE_w (he)) = k;
865 heap [k] = he;
866}
867
868#else // 4HEAP
869
870#define HEAP0 1
871
872/* towards the root */
873void inline_speed
874upheap (ANHE *heap, int k)
875{
876 ANHE he = heap [k];
877
878 for (;;)
879 {
880 int p = k >> 1;
881
882 /* maybe we could use a dummy element at heap [0]? */
883 if (!p || ANHE_at (heap [p]) <= ANHE_at (he))
884 break;
885
886 heap [k] = heap [p];
887 ev_active (ANHE_w (heap [k])) = k;
888 k = p;
889 }
890
891 heap [k] = he;
892 ev_active (ANHE_w (heap [k])) = k;
893}
894
895/* away from the root */
896void inline_speed
897downheap (ANHE *heap, int N, int k)
898{
899 ANHE he = heap [k];
790 900
791 for (;;) 901 for (;;)
792 { 902 {
793 int c = k << 1; 903 int c = k << 1;
794 904
795 if (c > N) 905 if (c > N)
796 break; 906 break;
797 907
 798 c += c < N && heap [c]->at > heap [c + 1]->at 908 c += c + 1 <= N && ANHE_at (heap [c]) > ANHE_at (heap [c + 1])
799 ? 1 : 0; 909 ? 1 : 0;
800 910
 801 if (w->at <= heap [c]->at) 911 if (ANHE_at (he) <= ANHE_at (heap [c]))
802 break; 912 break;
803 913
804 heap [k] = heap [c]; 914 heap [k] = heap [c];
805 ev_active (heap [k]) = k; 915 ev_active (ANHE_w (heap [k])) = k;
806 916
807 k = c; 917 k = c;
808 } 918 }
809 919
810 heap [k] = w; 920 heap [k] = he;
811 ev_active (heap [k]) = k; 921 ev_active (ANHE_w (he)) = k;
812} 922}
923#endif
813 924
814void inline_size 925void inline_size
815adjustheap (WT *heap, int N, int k) 926adjustheap (ANHE *heap, int N, int k)
816{ 927{
817 upheap (heap, k); 928 upheap (heap, k);
818 downheap (heap, N, k); 929 downheap (heap, N, k);
819} 930}
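
Why upheap followed by downheap suffices: after an in-place timestamp change the entry can only need to move in one direction, so one of the two calls falls through immediately. A toy binary-heap version of the same pattern (plain doubles, not libev code):

  #include <stdio.h>

  static void siftup (double *h, int k)
  {
    double v = h [k];

    while (k > 1 && h [k >> 1] > v)
      {
        h [k] = h [k >> 1];
        k >>= 1;
      }

    h [k] = v;
  }

  static void siftdown (double *h, int n, int k)
  {
    double v = h [k];

    for (;;)
      {
        int c = k << 1;

        if (c > n)
          break;

        if (c < n && h [c + 1] < h [c])
          ++c;

        if (v <= h [c])
          break;

        h [k] = h [c];
        k = c;
      }

    h [k] = v;
  }

  static void adjust (double *h, int n, int k)
  {
    siftup (h, k);      /* moves towards the root if the key shrank... */
    siftdown (h, n, k); /* ...otherwise this moves it away; one is a no-op */
  }

  int main (void)
  {
    double h [8] = { 0., 1., 3., 2., 7., 8., 4., 5. }; /* valid min-heap in h[1..7] */

    h [2] = 9.;       /* change a key in place */
    adjust (h, 7, 2); /* restore the heap property */

    printf ("%g %g\n", h [1], h [2]); /* prints: 1 7 */
    return 0;
  }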
820 931
912pipecb (EV_P_ ev_io *iow, int revents) 1023pipecb (EV_P_ ev_io *iow, int revents)
913{ 1024{
914#if EV_USE_EVENTFD 1025#if EV_USE_EVENTFD
915 if (evfd >= 0) 1026 if (evfd >= 0)
916 { 1027 {
917 uint64_t counter = 1; 1028 uint64_t counter;
918 read (evfd, &counter, sizeof (uint64_t)); 1029 read (evfd, &counter, sizeof (uint64_t));
919 } 1030 }
920 else 1031 else
921#endif 1032#endif
922 { 1033 {
1368void 1479void
1369ev_loop_fork (EV_P) 1480ev_loop_fork (EV_P)
1370{ 1481{
1371 postfork = 1; /* must be in line with ev_default_fork */ 1482 postfork = 1; /* must be in line with ev_default_fork */
1372} 1483}
1373
1374#endif 1484#endif
1375 1485
1376#if EV_MULTIPLICITY 1486#if EV_MULTIPLICITY
1377struct ev_loop * 1487struct ev_loop *
1378ev_default_loop_init (unsigned int flags) 1488ev_default_loop_init (unsigned int flags)
1459 EV_CB_INVOKE (p->w, p->events); 1569 EV_CB_INVOKE (p->w, p->events);
1460 } 1570 }
1461 } 1571 }
1462} 1572}
1463 1573
1574#if EV_IDLE_ENABLE
1575void inline_size
1576idle_reify (EV_P)
1577{
1578 if (expect_false (idleall))
1579 {
1580 int pri;
1581
1582 for (pri = NUMPRI; pri--; )
1583 {
1584 if (pendingcnt [pri])
1585 break;
1586
1587 if (idlecnt [pri])
1588 {
1589 queue_events (EV_A_ (W *)idles [pri], idlecnt [pri], EV_IDLE);
1590 break;
1591 }
1592 }
1593 }
1594}
1595#endif
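
A minimal usage sketch of the behaviour idle_reify implements, written against the public watcher API (priorities chosen arbitrarily for the example): in an iteration where nothing else is pending, only the highest-priority class that has idle watchers gets queued.

  #include <ev.h>
  #include <stdio.h>

  static void hi_cb (EV_P_ ev_idle *w, int revents) { puts ("high-priority idle"); }
  static void lo_cb (EV_P_ ev_idle *w, int revents) { puts ("low-priority idle"); }

  int main (void)
  {
    struct ev_loop *loop = ev_default_loop (0);
    ev_idle hi, lo;

    ev_idle_init (&hi, hi_cb);
    ev_set_priority (&hi, EV_MAXPRI); /* queued first by idle_reify */
    ev_idle_start (loop, &hi);

    ev_idle_init (&lo, lo_cb);
    ev_set_priority (&lo, EV_MINPRI); /* stays quiet while a higher-priority
                                         idle watcher is pending */
    ev_idle_start (loop, &lo);

    ev_loop (loop, EVLOOP_ONESHOT);   /* a single iteration for the sketch */
    return 0;
  }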
1596
1464void inline_size 1597void inline_size
1465timers_reify (EV_P) 1598timers_reify (EV_P)
1466{ 1599{
1467 while (timercnt && ev_at (timers [1]) <= mn_now) 1600 while (timercnt && ANHE_at (timers [HEAP0]) <= mn_now)
1468 { 1601 {
1469 ev_timer *w = (ev_timer *)timers [1]; 1602 ev_timer *w = (ev_timer *)ANHE_w (timers [HEAP0]);
1470 1603
1471 /*assert (("inactive timer on timer heap detected", ev_is_active (w)));*/ 1604 /*assert (("inactive timer on timer heap detected", ev_is_active (w)));*/
1472 1605
1473 /* first reschedule or stop timer */ 1606 /* first reschedule or stop timer */
1474 if (w->repeat) 1607 if (w->repeat)
1477 1610
1478 ev_at (w) += w->repeat; 1611 ev_at (w) += w->repeat;
1479 if (ev_at (w) < mn_now) 1612 if (ev_at (w) < mn_now)
1480 ev_at (w) = mn_now; 1613 ev_at (w) = mn_now;
1481 1614
1615 ANHE_at_set (timers [HEAP0]);
1482 downheap (timers, timercnt, 1); 1616 downheap (timers, timercnt, HEAP0);
1483 } 1617 }
1484 else 1618 else
1485 ev_timer_stop (EV_A_ w); /* nonrepeating: stop timer */ 1619 ev_timer_stop (EV_A_ w); /* nonrepeating: stop timer */
1486 1620
1487 ev_feed_event (EV_A_ (W)w, EV_TIMEOUT); 1621 ev_feed_event (EV_A_ (W)w, EV_TIMEOUT);
1490 1624
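
A minimal usage sketch of the repeat path handled above (timeout values assumed for the example): a repeating ev_timer is re-armed by pushing ev_at forward by its repeat value and re-sorting the heap, rather than being stopped.

  #include <ev.h>
  #include <stdio.h>

  static void tick (EV_P_ ev_timer *w, int revents)
  {
    puts ("tick"); /* re-armed automatically because w->repeat is non-zero */
  }

  int main (void)
  {
    struct ev_loop *loop = ev_default_loop (0);
    ev_timer t;

    /* fire after 0.5s, then every 2s */
    ev_timer_init (&t, tick, 0.5, 2.);
    ev_timer_start (loop, &t);

    ev_loop (loop, 0); /* runs forever for this sketch */
    return 0;
  }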
1491#if EV_PERIODIC_ENABLE 1625#if EV_PERIODIC_ENABLE
1492void inline_size 1626void inline_size
1493periodics_reify (EV_P) 1627periodics_reify (EV_P)
1494{ 1628{
1495 while (periodiccnt && ev_at (periodics [1]) <= ev_rt_now) 1629 while (periodiccnt && ANHE_at (periodics [HEAP0]) <= ev_rt_now)
1496 { 1630 {
1497 ev_periodic *w = (ev_periodic *)periodics [1]; 1631 ev_periodic *w = (ev_periodic *)ANHE_w (periodics [HEAP0]);
1498 1632
1499 /*assert (("inactive timer on periodic heap detected", ev_is_active (w)));*/ 1633 /*assert (("inactive timer on periodic heap detected", ev_is_active (w)));*/
1500 1634
1501 /* first reschedule or stop timer */ 1635 /* first reschedule or stop timer */
1502 if (w->reschedule_cb) 1636 if (w->reschedule_cb)
1503 { 1637 {
1504 ev_at (w) = w->reschedule_cb (w, ev_rt_now + TIME_EPSILON); 1638 ev_at (w) = w->reschedule_cb (w, ev_rt_now + TIME_EPSILON);
1505 assert (("ev_periodic reschedule callback returned time in the past", ev_at (w) > ev_rt_now)); 1639 assert (("ev_periodic reschedule callback returned time in the past", ev_at (w) > ev_rt_now));
1640 ANHE_at_set (periodics [HEAP0]);
1506 downheap (periodics, periodiccnt, 1); 1641 downheap (periodics, periodiccnt, HEAP0);
1507 } 1642 }
1508 else if (w->interval) 1643 else if (w->interval)
1509 { 1644 {
1510 ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval; 1645 ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval;
1511 if (ev_at (w) - ev_rt_now <= TIME_EPSILON) ev_at (w) += w->interval; 1646 if (ev_at (w) - ev_rt_now <= TIME_EPSILON) ev_at (w) += w->interval;
1512 assert (("ev_periodic timeout in the past detected while processing timers, negative interval?", ev_at (w) > ev_rt_now)); 1647 assert (("ev_periodic timeout in the past detected while processing timers, negative interval?", ev_at (w) > ev_rt_now));
1648 ANHE_at_set (periodics [HEAP0]);
1513 downheap (periodics, periodiccnt, 1); 1649 downheap (periodics, periodiccnt, HEAP0);
1514 } 1650 }
1515 else 1651 else
1516 ev_periodic_stop (EV_A_ w); /* nonrepeating: stop timer */ 1652 ev_periodic_stop (EV_A_ w); /* nonrepeating: stop timer */
1517 1653
1518 ev_feed_event (EV_A_ (W)w, EV_PERIODIC); 1654 ev_feed_event (EV_A_ (W)w, EV_PERIODIC);
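
The interval branch aligns the next trigger to an absolute grid anchored at offset. A standalone check of that formula with assumed numbers:

  #include <math.h>
  #include <stdio.h>

  int main (void)
  {
    double offset   = 0.;    /* anchor of the schedule */
    double interval = 3600.; /* every full hour */
    double now      = 5000.; /* some wall-clock time */

    double at = offset + ceil ((now - offset) / interval) * interval;

    printf ("%.0f\n", at);   /* 7200: the next hour boundary after 5000 */
    return 0;
  }

If now falls exactly on a boundary, ceil returns that same boundary, which is why the code above nudges ev_at forward by one interval whenever it ends up within TIME_EPSILON of ev_rt_now.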
1523periodics_reschedule (EV_P) 1659periodics_reschedule (EV_P)
1524{ 1660{
1525 int i; 1661 int i;
1526 1662
1527 /* adjust periodics after time jump */ 1663 /* adjust periodics after time jump */
1528 for (i = 1; i <= periodiccnt; ++i) 1664 for (i = HEAP0; i < periodiccnt + HEAP0; ++i)
1529 { 1665 {
1530 ev_periodic *w = (ev_periodic *)periodics [i]; 1666 ev_periodic *w = (ev_periodic *)ANHE_w (periodics [i]);
1531 1667
1532 if (w->reschedule_cb) 1668 if (w->reschedule_cb)
1533 ev_at (w) = w->reschedule_cb (w, ev_rt_now); 1669 ev_at (w) = w->reschedule_cb (w, ev_rt_now);
1534 else if (w->interval) 1670 else if (w->interval)
1535 ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval; 1671 ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval;
1536 }
1537 1672
1538 /* now rebuild the heap */ 1673 ANHE_at_set (periodics [i]);
1674 }
1675
 1676 /* now rebuild the heap: this is fine for the 2-heap, inefficient for the 4-heap, but correct */
1539 for (i = periodiccnt >> 1; i--; ) 1677 for (i = periodiccnt >> 1; i--; )
1540 downheap (periodics, periodiccnt, i); 1678 downheap (periodics, periodiccnt, i + HEAP0);
1541}
1542#endif
1543
1544#if EV_IDLE_ENABLE
1545void inline_size
1546idle_reify (EV_P)
1547{
1548 if (expect_false (idleall))
1549 {
1550 int pri;
1551
1552 for (pri = NUMPRI; pri--; )
1553 {
1554 if (pendingcnt [pri])
1555 break;
1556
1557 if (idlecnt [pri])
1558 {
1559 queue_events (EV_A_ (W *)idles [pri], idlecnt [pri], EV_IDLE);
1560 break;
1561 }
1562 }
1563 }
1564} 1679}
1565#endif 1680#endif
1566 1681
1567void inline_speed 1682void inline_speed
1568time_update (EV_P_ ev_tstamp max_block) 1683time_update (EV_P_ ev_tstamp max_block)
1597 */ 1712 */
1598 for (i = 4; --i; ) 1713 for (i = 4; --i; )
1599 { 1714 {
1600 rtmn_diff = ev_rt_now - mn_now; 1715 rtmn_diff = ev_rt_now - mn_now;
1601 1716
1602 if (fabs (odiff - rtmn_diff) < MIN_TIMEJUMP) 1717 if (expect_true (fabs (odiff - rtmn_diff) < MIN_TIMEJUMP))
1603 return; /* all is well */ 1718 return; /* all is well */
1604 1719
1605 ev_rt_now = ev_time (); 1720 ev_rt_now = ev_time ();
1606 mn_now = get_clock (); 1721 mn_now = get_clock ();
1607 now_floor = mn_now; 1722 now_floor = mn_now;
1622 { 1737 {
1623#if EV_PERIODIC_ENABLE 1738#if EV_PERIODIC_ENABLE
1624 periodics_reschedule (EV_A); 1739 periodics_reschedule (EV_A);
1625#endif 1740#endif
1626 /* adjust timers. this is easy, as the offset is the same for all of them */ 1741 /* adjust timers. this is easy, as the offset is the same for all of them */
1627 for (i = 1; i <= timercnt; ++i) 1742 for (i = 0; i < timercnt; ++i)
1628 ev_at (timers [i]) += ev_rt_now - mn_now; 1743 {
1744 ANHE *he = timers + i + HEAP0;
1745 ANHE_w (*he)->at += ev_rt_now - mn_now;
1746 ANHE_at_set (*he);
1747 }
1629 } 1748 }
1630 1749
1631 mn_now = ev_rt_now; 1750 mn_now = ev_rt_now;
1632 } 1751 }
1633} 1752}
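
When a time jump is detected, every relative timer is shifted by the same delta, so the time remaining until it fires is preserved. A toy calculation with assumed numbers:

  #include <stdio.h>

  int main (void)
  {
    double mn_now    = 1000.; /* the loop's idea of "now" before the jump */
    double timer_at  = 1030.; /* deadline: 30 seconds remaining */
    double ev_rt_now = 4600.; /* clock jumped forward by one hour */

    timer_at += ev_rt_now - mn_now; /* the same shift applied to every timer */
    mn_now    = ev_rt_now;

    printf ("%.0f\n", timer_at - mn_now); /* still 30 seconds remaining */
    return 0;
  }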
1703 1822
1704 waittime = MAX_BLOCKTIME; 1823 waittime = MAX_BLOCKTIME;
1705 1824
1706 if (timercnt) 1825 if (timercnt)
1707 { 1826 {
1708 ev_tstamp to = ev_at (timers [1]) - mn_now + backend_fudge; 1827 ev_tstamp to = ANHE_at (timers [HEAP0]) - mn_now + backend_fudge;
1709 if (waittime > to) waittime = to; 1828 if (waittime > to) waittime = to;
1710 } 1829 }
1711 1830
1712#if EV_PERIODIC_ENABLE 1831#if EV_PERIODIC_ENABLE
1713 if (periodiccnt) 1832 if (periodiccnt)
1714 { 1833 {
1715 ev_tstamp to = ev_at (periodics [1]) - ev_rt_now + backend_fudge; 1834 ev_tstamp to = ANHE_at (periodics [HEAP0]) - ev_rt_now + backend_fudge;
1716 if (waittime > to) waittime = to; 1835 if (waittime > to) waittime = to;
1717 } 1836 }
1718#endif 1837#endif
1719 1838
1720 if (expect_false (waittime < timeout_blocktime)) 1839 if (expect_false (waittime < timeout_blocktime))
1872{ 1991{
1873 clear_pending (EV_A_ (W)w); 1992 clear_pending (EV_A_ (W)w);
1874 if (expect_false (!ev_is_active (w))) 1993 if (expect_false (!ev_is_active (w)))
1875 return; 1994 return;
1876 1995
1877 assert (("ev_io_start called with illegal fd (must stay constant after start!)", w->fd >= 0 && w->fd < anfdmax)); 1996 assert (("ev_io_stop called with illegal fd (must stay constant after start!)", w->fd >= 0 && w->fd < anfdmax));
1878 1997
1879 wlist_del (&anfds[w->fd].head, (WL)w); 1998 wlist_del (&anfds[w->fd].head, (WL)w);
1880 ev_stop (EV_A_ (W)w); 1999 ev_stop (EV_A_ (W)w);
1881 2000
1882 fd_change (EV_A_ w->fd, 1); 2001 fd_change (EV_A_ w->fd, 1);
1890 2009
1891 ev_at (w) += mn_now; 2010 ev_at (w) += mn_now;
1892 2011
1893 assert (("ev_timer_start called with negative timer repeat value", w->repeat >= 0.)); 2012 assert (("ev_timer_start called with negative timer repeat value", w->repeat >= 0.));
1894 2013
1895 ev_start (EV_A_ (W)w, ++timercnt); 2014 ev_start (EV_A_ (W)w, ++timercnt + HEAP0 - 1);
1896 array_needsize (WT, timers, timermax, timercnt + 1, EMPTY2); 2015 array_needsize (ANHE, timers, timermax, ev_active (w) + 1, EMPTY2);
1897 timers [timercnt] = (WT)w; 2016 ANHE_w (timers [ev_active (w)]) = (WT)w;
2017 ANHE_at_set (timers [ev_active (w)]);
1898 upheap (timers, timercnt); 2018 upheap (timers, ev_active (w));
1899 2019
1900 /*assert (("internal timer heap corruption", timers [ev_active (w)] == w));*/ 2020 /*assert (("internal timer heap corruption", timers [ev_active (w)] == (WT)w));*/
1901} 2021}
1902 2022
1903void noinline 2023void noinline
1904ev_timer_stop (EV_P_ ev_timer *w) 2024ev_timer_stop (EV_P_ ev_timer *w)
1905{ 2025{
1908 return; 2028 return;
1909 2029
1910 { 2030 {
1911 int active = ev_active (w); 2031 int active = ev_active (w);
1912 2032
1913 assert (("internal timer heap corruption", timers [active] == (WT)w)); 2033 assert (("internal timer heap corruption", ANHE_w (timers [active]) == (WT)w));
1914 2034
1915 if (expect_true (active < timercnt)) 2035 if (expect_true (active < timercnt + HEAP0 - 1))
1916 { 2036 {
1917 timers [active] = timers [timercnt]; 2037 timers [active] = timers [timercnt + HEAP0 - 1];
1918 adjustheap (timers, timercnt, active); 2038 adjustheap (timers, timercnt, active);
1919 } 2039 }
1920 2040
1921 --timercnt; 2041 --timercnt;
1922 } 2042 }
1932 if (ev_is_active (w)) 2052 if (ev_is_active (w))
1933 { 2053 {
1934 if (w->repeat) 2054 if (w->repeat)
1935 { 2055 {
1936 ev_at (w) = mn_now + w->repeat; 2056 ev_at (w) = mn_now + w->repeat;
2057 ANHE_at_set (timers [ev_active (w)]);
1937 adjustheap (timers, timercnt, ev_active (w)); 2058 adjustheap (timers, timercnt, ev_active (w));
1938 } 2059 }
1939 else 2060 else
1940 ev_timer_stop (EV_A_ w); 2061 ev_timer_stop (EV_A_ w);
1941 } 2062 }
1962 ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval; 2083 ev_at (w) = w->offset + ceil ((ev_rt_now - w->offset) / w->interval) * w->interval;
1963 } 2084 }
1964 else 2085 else
1965 ev_at (w) = w->offset; 2086 ev_at (w) = w->offset;
1966 2087
1967 ev_start (EV_A_ (W)w, ++periodiccnt); 2088 ev_start (EV_A_ (W)w, ++periodiccnt + HEAP0 - 1);
1968 array_needsize (WT, periodics, periodicmax, periodiccnt + 1, EMPTY2); 2089 array_needsize (ANHE, periodics, periodicmax, ev_active (w) + 1, EMPTY2);
1969 periodics [periodiccnt] = (WT)w; 2090 ANHE_w (periodics [ev_active (w)]) = (WT)w;
1970 upheap (periodics, periodiccnt); 2091 upheap (periodics, ev_active (w));
1971 2092
1972 /*assert (("internal periodic heap corruption", periodics [ev_active (w)] == w));*/ 2093 /*assert (("internal periodic heap corruption", ANHE_w (periodics [ev_active (w)]) == (WT)w));*/
1973} 2094}
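
A minimal usage sketch of starting a periodic on an absolute schedule (interval chosen for illustration); ev_periodic_start computes the initial ev_at exactly as shown above.

  #include <ev.h>
  #include <stdio.h>

  static void hourly (EV_P_ ev_periodic *w, int revents)
  {
    puts ("top of the hour");
  }

  int main (void)
  {
    struct ev_loop *loop = ev_default_loop (0);
    ev_periodic p;

    /* offset 0, interval 3600, no reschedule callback:
       trigger at every full hour of wall-clock time */
    ev_periodic_init (&p, hourly, 0., 3600., 0);
    ev_periodic_start (loop, &p);

    ev_loop (loop, 0); /* runs forever for this sketch */
    return 0;
  }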
1974 2095
1975void noinline 2096void noinline
1976ev_periodic_stop (EV_P_ ev_periodic *w) 2097ev_periodic_stop (EV_P_ ev_periodic *w)
1977{ 2098{
1980 return; 2101 return;
1981 2102
1982 { 2103 {
1983 int active = ev_active (w); 2104 int active = ev_active (w);
1984 2105
1985 assert (("internal periodic heap corruption", periodics [active] == (WT)w)); 2106 assert (("internal periodic heap corruption", ANHE_w (periodics [active]) == (WT)w));
1986 2107
1987 if (expect_true (active < periodiccnt)) 2108 if (expect_true (active < periodiccnt + HEAP0 - 1))
1988 { 2109 {
1989 periodics [active] = periodics [periodiccnt]; 2110 periodics [active] = periodics [periodiccnt + HEAP0 - 1];
1990 adjustheap (periodics, periodiccnt, active); 2111 adjustheap (periodics, periodiccnt, active);
1991 } 2112 }
1992 2113
1993 --periodiccnt; 2114 --periodiccnt;
1994 } 2115 }
2114 if (w->wd < 0) 2235 if (w->wd < 0)
2115 { 2236 {
2116 ev_timer_start (EV_A_ &w->timer); /* this is not race-free, so we still need to recheck periodically */ 2237 ev_timer_start (EV_A_ &w->timer); /* this is not race-free, so we still need to recheck periodically */
2117 2238
2118 /* monitor some parent directory for speedup hints */ 2239 /* monitor some parent directory for speedup hints */
2240 /* note that exceeding the hardcoded limit is not a correctness issue, */
2241 /* but an efficiency issue only */
2119 if ((errno == ENOENT || errno == EACCES) && strlen (w->path) < 4096) 2242 if ((errno == ENOENT || errno == EACCES) && strlen (w->path) < 4096)
2120 { 2243 {
2121 char path [4096]; 2244 char path [4096];
2122 strcpy (path, w->path); 2245 strcpy (path, w->path);
2123 2246
