… | |
… | |
536 | clRetainEvent (event); |
536 | clRetainEvent (event); |
537 | eq_enq (&eq_event_vtbl, user_data, (void *)event, (void *)(IV)event_command_exec_status, 0); |
537 | eq_enq (&eq_event_vtbl, user_data, (void *)event, (void *)(IV)event_command_exec_status, 0); |
538 | } |
538 | } |
539 | |
539 | |
/*****************************************************************************/
/* mapped_xxx - utilities for XS code */
542 | |
|
|
543 | static OpenCL__Mapped |
|
|
544 | SvMAPPED (SV *sv) |
|
|
545 | { |
|
|
546 | // no typechecking atm., keep your fingers crossed |
|
|
547 | return (OpenCL__Mapped)SvMAGIC (SvRV (sv))->mg_ptr; |
|
|
548 | } |
|
|
549 | |
|
|
550 | struct mapped |
|
|
551 | { |
|
|
552 | cl_command_queue queue; |
|
|
553 | cl_mem memobj; |
|
|
554 | void *ptr; |
|
|
555 | size_t cb; |
|
|
556 | cl_event event; |
|
|
557 | size_t row_pitch; |
|
|
558 | size_t slice_pitch; |
|
|
559 | }; |
|
|
560 | |
|
|
561 | static SV * |
|
|
562 | mapped_new (HV *stash, cl_command_queue queue, cl_mem memobj, cl_map_flags flags, void *ptr, size_t cb, cl_event ev, size_t row_pitch, size_t slice_pitch) |
|
|
563 | { |
|
|
564 | SV *data = newSV (0); |
|
|
565 | SvUPGRADE (data, SVt_PVMG); |
|
|
566 | |
|
|
567 | OpenCL__Mapped mapped; |
|
|
568 | New (0, mapped, 1, struct mapped); |
|
|
569 | |
|
|
570 | clRetainCommandQueue (queue); |
|
|
571 | |
|
|
572 | mapped->queue = queue; |
|
|
573 | mapped->memobj = memobj; |
|
|
574 | mapped->ptr = ptr; |
|
|
575 | mapped->cb = cb; |
|
|
576 | mapped->event = ev; |
|
|
577 | mapped->row_pitch = row_pitch; |
|
|
578 | mapped->slice_pitch = slice_pitch; |
|
|
579 | |
|
|
580 | sv_magicext (data, 0, PERL_MAGIC_ext, 0, (char *)mapped, 0); |
|
|
581 | |
|
|
582 | if (!(flags & CL_MAP_WRITE)) |
|
|
583 | SvREADONLY_on (data); |
|
|
584 | |
|
|
585 | if (SvLEN (data)) |
|
|
586 | Safefree (data); |
|
|
587 | |
|
|
588 | SvPVX (data) = (char *)ptr; |
|
|
589 | SvCUR_set (data, cb); |
|
|
590 | SvLEN_set (data, 0); |
|
|
591 | SvPOK_only (data); |
|
|
592 | |
|
|
593 | return sv_2mortal (sv_bless (newRV_noinc (data), stash)); |
|
|
594 | } |
|
|
595 | |
|
|
596 | static void |
|
|
597 | mapped_detach (SV *sv, OpenCL__Mapped mapped) |
|
|
598 | { |
|
|
599 | SV *data = SvRV (sv); |
|
|
600 | |
|
|
601 | if (SvPVX (data) != (char *)mapped->ptr) |
|
|
602 | warn ("FATAL: OpenCL memory mapped scalar changed location, detected"); |
|
|
603 | else |
|
|
604 | { |
|
|
605 | SvREADONLY_off (data); |
|
|
606 | SvCUR_set (data, 0); |
|
|
607 | SvPVX (data) = 0; |
|
|
608 | SvOK_off (data); |
|
|
609 | } |
|
|
610 | |
|
|
611 | mapped->ptr = 0; |
|
|
612 | } |
|
|
613 | |
|
|
614 | /*****************************************************************************/ |
|
|
615 | |
542 | |
616 | static size_t |
543 | static size_t |
617 | img_row_pitch (cl_mem img) |
544 | img_row_pitch (cl_mem img) |
618 | { |
545 | { |
619 | size_t res; |
546 | size_t res; |
… | |
… | |
624 | static cl_event * |
551 | static cl_event * |
625 | event_list (SV **items, cl_uint *rcount, cl_event extra) |
552 | event_list (SV **items, cl_uint *rcount, cl_event extra) |
626 | { |
553 | { |
627 | cl_uint count = *rcount; |
554 | cl_uint count = *rcount; |
628 | |
555 | |
|
|
556 | if (count > 0x7fffffff) // yeha, it's a hack - the caller might have underflowed |
|
|
557 | count = 0; |
|
|
558 | |
629 | if (!count && !extra) |
559 | if (!count && !extra) |
630 | return 0; |
560 | return 0; |
631 | |
561 | |
632 | cl_event *list = tmpbuf (sizeof (cl_event) * (count + 1)); |
562 | cl_event *list = tmpbuf (sizeof (cl_event) * (count + 1)); |
633 | int i = 0; |
563 | int i = 0; |
634 | |
564 | |
635 | while (count) |
565 | while (count--) |
636 | { |
|
|
637 | --count; |
|
|
638 | if (SvOK (items [count])) |
566 | if (SvOK (items [count])) |
639 | list [i++] = SvCLOBJ ("clEnqueue", "wait_events", items [count], "OpenCL::Event"); |
567 | list [i++] = SvCLOBJ ("clEnqueue", "wait_events", items [count], "OpenCL::Event"); |
640 | } |
|
|
641 | |
568 | |
642 | if (extra) |
569 | if (extra) |
643 | list [i++] = extra; |
570 | list [i++] = extra; |
644 | |
571 | |
645 | *rcount = i; |
572 | *rcount = i; |
646 | |
573 | |
647 | return i ? list : 0; |
574 | return i ? list : 0; |
648 | } |
575 | } |
649 | |
576 | |
650 | #define EVENT_LIST(skip) \ |
577 | #define EVENT_LIST(skip) \ |
651 | cl_uint event_list_count = items - skip; \ |
578 | cl_uint event_list_count = items - (skip); \ |
652 | cl_event *event_list_ptr = event_list (&ST (skip), &event_list_count, 0) |
579 | cl_event *event_list_ptr = event_list (&ST (skip), &event_list_count, 0) |
653 | |
580 | |
654 | #define INFO(class) \ |
581 | #define INFO(class) \ |
655 | { \ |
582 | { \ |
656 | size_t size; \ |
583 | size_t size; \ |
… | |
… | |
660 | SvPOK_only (sv); \ |
587 | SvPOK_only (sv); \ |
661 | SvCUR_set (sv, size); \ |
588 | SvCUR_set (sv, size); \ |
662 | NEED_SUCCESS (Get ## class ## Info, (self, name, size, SvPVX (sv), 0)); \ |
589 | NEED_SUCCESS (Get ## class ## Info, (self, name, size, SvPVX (sv), 0)); \ |
663 | XPUSHs (sv); \ |
590 | XPUSHs (sv); \ |
664 | } |
591 | } |
|
|
592 | |
|
|
593 | /*****************************************************************************/ |
|
|
594 | /* mapped_xxx */ |
|
|
595 | |
|
|
596 | static OpenCL__Mapped |
|
|
597 | SvMAPPED (SV *sv) |
|
|
598 | { |
|
|
599 | // no typechecking atm., keep your fingers crossed |
|
|
600 | return (OpenCL__Mapped)SvMAGIC (SvRV (sv))->mg_ptr; |
|
|
601 | } |
|
|
602 | |
|
|
603 | struct mapped |
|
|
604 | { |
|
|
605 | cl_command_queue queue; |
|
|
606 | cl_mem memobj; |
|
|
607 | void *ptr; |
|
|
608 | size_t cb; |
|
|
609 | cl_event event; |
|
|
610 | size_t row_pitch; |
|
|
611 | size_t slice_pitch; |
|
|
612 | }; |
|
|
613 | |
|
|
614 | static SV * |
|
|
615 | mapped_new (HV *stash, cl_command_queue queue, cl_mem memobj, cl_map_flags flags, void *ptr, size_t cb, cl_event ev, size_t row_pitch, size_t slice_pitch) |
|
|
616 | { |
|
|
617 | SV *data = newSV (0); |
|
|
618 | SvUPGRADE (data, SVt_PVMG); |
|
|
619 | |
|
|
620 | OpenCL__Mapped mapped; |
|
|
621 | New (0, mapped, 1, struct mapped); |
|
|
622 | |
|
|
623 | clRetainCommandQueue (queue); |
|
|
624 | |
|
|
625 | mapped->queue = queue; |
|
|
626 | mapped->memobj = memobj; |
|
|
627 | mapped->ptr = ptr; |
|
|
628 | mapped->cb = cb; |
|
|
629 | mapped->event = ev; |
|
|
630 | mapped->row_pitch = row_pitch; |
|
|
631 | mapped->slice_pitch = slice_pitch; |
|
|
632 | |
|
|
633 | sv_magicext (data, 0, PERL_MAGIC_ext, 0, (char *)mapped, 0); |
|
|
634 | |
|
|
635 | if (SvLEN (data)) |
|
|
636 | Safefree (data); |
|
|
637 | |
|
|
638 | SvPVX (data) = (char *)ptr; |
|
|
639 | SvCUR_set (data, cb); |
|
|
640 | SvLEN_set (data, 0); |
|
|
641 | SvPOK_only (data); |
|
|
642 | |
|
|
643 | SV *obj = sv_2mortal (sv_bless (newRV_noinc (data), stash)); |
|
|
644 | |
|
|
645 | if (!(flags & CL_MAP_WRITE)) |
|
|
646 | SvREADONLY_on (data); |
|
|
647 | |
|
|
648 | return obj; |
|
|
649 | } |
|
|
650 | |
|
|
651 | static void |
|
|
652 | mapped_detach (SV *sv, OpenCL__Mapped mapped) |
|
|
653 | { |
|
|
654 | SV *data = SvRV (sv); |
|
|
655 | |
|
|
656 | // the next check checks both before AND after detach, where SvPVX should STILL be 0 |
|
|
657 | if (SvPVX (data) != (char *)mapped->ptr) |
|
|
658 | warn ("FATAL: OpenCL memory mapped scalar changed location, detected"); |
|
|
659 | else |
|
|
660 | { |
|
|
661 | SvREADONLY_off (data); |
|
|
662 | SvCUR_set (data, 0); |
|
|
663 | SvPVX (data) = 0; |
|
|
664 | SvOK_off (data); |
|
|
665 | } |
|
|
666 | |
|
|
667 | mapped->ptr = 0; |
|
|
668 | } |
|
|
669 | |
|
|
670 | static void |
|
|
671 | mapped_unmap (SV *self, OpenCL__Mapped mapped, cl_command_queue queue, SV **wait_list, cl_uint event_list_count) |
|
|
672 | { |
|
|
673 | cl_event *event_list_ptr = event_list (wait_list, &event_list_count, mapped->event); |
|
|
674 | cl_event ev; |
|
|
675 | |
|
|
676 | NEED_SUCCESS (EnqueueUnmapMemObject, (queue, mapped->memobj, mapped->ptr, event_list_count, event_list_ptr, &ev)); |
|
|
677 | |
|
|
678 | clReleaseEvent (mapped->event); |
|
|
679 | mapped->event = ev; |
|
|
680 | |
|
|
681 | mapped_detach (self, mapped); |
|
|
682 | } |
|
|
683 | |
|
|
684 | /*****************************************************************************/ |
665 | |
685 | |
666 | MODULE = OpenCL PACKAGE = OpenCL |
686 | MODULE = OpenCL PACKAGE = OpenCL |
667 | |
687 | |
668 | PROTOTYPES: ENABLE |
688 | PROTOTYPES: ENABLE |
669 | |
689 | |
… | |
… | |
1612 | map_buffer (OpenCL::Queue self, OpenCL::Buffer buf, cl_bool blocking = 1, cl_map_flags map_flags = CL_MAP_READ | CL_MAP_WRITE, size_t offset = 0, size_t cb = 0, ...) |
1632 | map_buffer (OpenCL::Queue self, OpenCL::Buffer buf, cl_bool blocking = 1, cl_map_flags map_flags = CL_MAP_READ | CL_MAP_WRITE, size_t offset = 0, size_t cb = 0, ...) |
1613 | ALIAS: |
1633 | ALIAS: |
1614 | enqueue_map_buffer = 0 |
1634 | enqueue_map_buffer = 0 |
1615 | PPCODE: |
1635 | PPCODE: |
1616 | cl_event ev; |
1636 | cl_event ev; |
1617 | EVENT_LIST (7); |
1637 | EVENT_LIST (6); |
1618 | |
1638 | |
1619 | NEED_SUCCESS_ARG (void *ptr, EnqueueMapBuffer, (self, buf, blocking, map_flags, offset, cb, event_list_count, event_list_ptr, &ev, &res)); |
1639 | NEED_SUCCESS_ARG (void *ptr, EnqueueMapBuffer, (self, buf, blocking, map_flags, offset, cb, event_list_count, event_list_ptr, &ev, &res)); |
1620 | XPUSHs (mapped_new (stash_mappedbuffer, self, buf, map_flags, ptr, cb, ev, 0, 0)); |
1640 | XPUSHs (mapped_new (stash_mappedbuffer, self, buf, map_flags, ptr, cb, ev, 0, 0)); |
1621 | |
1641 | |
1622 | void |
1642 | void |
… | |
… | |
1626 | PPCODE: |
1646 | PPCODE: |
1627 | cl_event ev; |
1647 | cl_event ev; |
1628 | const size_t origin[3] = { x, y, z }; |
1648 | const size_t origin[3] = { x, y, z }; |
1629 | const size_t region[3] = { width, height, depth }; |
1649 | const size_t region[3] = { width, height, depth }; |
1630 | size_t row_pitch, slice_pitch; |
1650 | size_t row_pitch, slice_pitch; |
1631 | EVENT_LIST (11); |
1651 | EVENT_LIST (10); |
1632 | |
1652 | |
1633 | NEED_SUCCESS_ARG (void *ptr, EnqueueMapImage, (self, img, blocking, map_flags, origin, region, &row_pitch, &slice_pitch, event_list_count, event_list_ptr, &ev, &res)); |
1653 | NEED_SUCCESS_ARG (void *ptr, EnqueueMapImage, (self, img, blocking, map_flags, origin, region, &row_pitch, &slice_pitch, event_list_count, event_list_ptr, &ev, &res)); |
1634 | |
1654 | |
1635 | size_t cb = slice_pitch ? slice_pitch * region [2] |
1655 | size_t cb = slice_pitch ? slice_pitch * region [2] |
1636 | : row_pitch ? row_pitch * region [1] |
1656 | : row_pitch ? row_pitch * region [1] |
… | |
… | |
1639 | XPUSHs (mapped_new (stash_mappedimage, self, img, map_flags, ptr, cb, ev, row_pitch, slice_pitch)); |
1659 | XPUSHs (mapped_new (stash_mappedimage, self, img, map_flags, ptr, cb, ev, row_pitch, slice_pitch)); |
1640 | |
1660 | |
1641 | void |
1661 | void |
1642 | unmap (OpenCL::Queue self, OpenCL::Mapped mapped, ...) |
1662 | unmap (OpenCL::Queue self, OpenCL::Mapped mapped, ...) |
1643 | PPCODE: |
1663 | PPCODE: |
1644 | cl_uint event_list_count = items - 2; |
1664 | mapped_unmap (ST (1), mapped, self, &ST (2), items - 2); |
1645 | cl_event *event_list_ptr = event_list (&ST (2), &event_list_count, mapped->event); |
|
|
1646 | |
|
|
1647 | NEED_SUCCESS (EnqueueUnmapMemObject, (self, mapped->memobj, mapped->ptr, event_list_count, event_list_ptr, &mapped->event)); |
|
|
1648 | mapped_detach (ST (1), mapped); |
|
|
1649 | |
|
|
1650 | if (GIMME_V != G_VOID) |
1665 | if (GIMME_V != G_VOID) |
|
|
1666 | { |
|
|
1667 | clRetainEvent (mapped->event); |
1651 | XPUSH_CLOBJ (stash_event, mapped->event); |
1668 | XPUSH_CLOBJ (stash_event, mapped->event); |
|
|
1669 | } |
1652 | |
1670 | |
1653 | void |
1671 | void |
1654 | task (OpenCL::Queue self, OpenCL::Kernel kernel, ...) |
1672 | task (OpenCL::Queue self, OpenCL::Kernel kernel, ...) |
1655 | ALIAS: |
1673 | ALIAS: |
1656 | enqueue_task = 0 |
1674 | enqueue_task = 0 |
… | |
… | |
2708 | |
2726 | |
2709 | clEnqueueUnmapMemObject (mapped->queue, mapped->memobj, mapped->ptr, 1, &mapped->event, 0); |
2727 | clEnqueueUnmapMemObject (mapped->queue, mapped->memobj, mapped->ptr, 1, &mapped->event, 0); |
2710 | mapped_detach (self, mapped); |
2728 | mapped_detach (self, mapped); |
2711 | |
2729 | |
2712 | clReleaseCommandQueue (mapped->queue); |
2730 | clReleaseCommandQueue (mapped->queue); |
2713 | |
|
|
2714 | if (mapped->event) |
|
|
2715 | clReleaseEvent (mapped->event); |
2731 | clReleaseEvent (mapped->event); |
2716 | |
|
|
2717 | Safefree (mapped); |
2732 | Safefree (mapped); |
|
|
2733 | |
|
|
2734 | void |
|
|
2735 | unmap (OpenCL::Mapped self, ...) |
|
|
2736 | CODE: |
|
|
2737 | mapped_unmap (ST (0), self, self->queue, &ST (1), items - 1); |
2718 | |
2738 | |
2719 | bool |
2739 | bool |
2720 | mapped (OpenCL::Mapped self) |
2740 | mapped (OpenCL::Mapped self) |
2721 | CODE: |
2741 | CODE: |
2722 | RETVAL = !!self->ptr; |
2742 | RETVAL = !!self->ptr; |