OpenCL C++ Bindings
cl2.hpp
Go to the documentation of this file.
1 /*******************************************************************************
2  * Copyright (c) 2008-2016 The Khronos Group Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and/or associated documentation files (the
6  * "Materials"), to deal in the Materials without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sublicense, and/or sell copies of the Materials, and to
9  * permit persons to whom the Materials are furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included
13  * in all copies or substantial portions of the Materials.
14  *
15  * MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS
16  * KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS
17  * SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT
18  * https://www.khronos.org/registry/
19  *
20  * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
21  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
22  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
23  * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
24  * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
25  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
26  * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
27  ******************************************************************************/
28 
314 
327 
391 #ifndef CL_HPP_
392 #define CL_HPP_
393 
394 /* Handle deprecated preprocessor definitions. In each case, we only check for
395  * the old name if the new name is not defined, so that user code can define
396  * both and hence work with either version of the bindings.
397  */
398 #if !defined(CL_HPP_USE_DX_INTEROP) && defined(USE_DX_INTEROP)
399 # pragma message("cl2.hpp: USE_DX_INTEROP is deprecated. Define CL_HPP_USE_DX_INTEROP instead")
400 # define CL_HPP_USE_DX_INTEROP
401 #endif
402 #if !defined(CL_HPP_USE_CL_DEVICE_FISSION) && defined(USE_CL_DEVICE_FISSION)
403 # pragma message("cl2.hpp: USE_CL_DEVICE_FISSION is deprecated. Define CL_HPP_USE_CL_DEVICE_FISSION instead")
404 # define CL_HPP_USE_CL_DEVICE_FISSION
405 #endif
406 #if !defined(CL_HPP_ENABLE_EXCEPTIONS) && defined(__CL_ENABLE_EXCEPTIONS)
407 # pragma message("cl2.hpp: __CL_ENABLE_EXCEPTIONS is deprecated. Define CL_HPP_ENABLE_EXCEPTIONS instead")
408 # define CL_HPP_ENABLE_EXCEPTIONS
409 #endif
410 #if !defined(CL_HPP_NO_STD_VECTOR) && defined(__NO_STD_VECTOR)
411 # pragma message("cl2.hpp: __NO_STD_VECTOR is deprecated. Define CL_HPP_NO_STD_VECTOR instead")
412 # define CL_HPP_NO_STD_VECTOR
413 #endif
414 #if !defined(CL_HPP_NO_STD_STRING) && defined(__NO_STD_STRING)
415 # pragma message("cl2.hpp: __NO_STD_STRING is deprecated. Define CL_HPP_NO_STD_STRING instead")
416 # define CL_HPP_NO_STD_STRING
417 #endif
418 #if defined(VECTOR_CLASS)
419 # pragma message("cl2.hpp: VECTOR_CLASS is deprecated. Alias cl::vector instead")
420 #endif
421 #if defined(STRING_CLASS)
422 # pragma message("cl2.hpp: STRING_CLASS is deprecated. Alias cl::string instead.")
423 #endif
424 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS) && defined(__CL_USER_OVERRIDE_ERROR_STRINGS)
425 # pragma message("cl2.hpp: __CL_USER_OVERRIDE_ERROR_STRINGS is deprecated. Define CL_HPP_USER_OVERRIDE_ERROR_STRINGS instead")
426 # define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
427 #endif
428 
429 /* Warn about features that are no longer supported
430  */
431 #if defined(__USE_DEV_VECTOR)
432 # pragma message("cl2.hpp: __USE_DEV_VECTOR is no longer supported. Expect compilation errors")
433 #endif
434 #if defined(__USE_DEV_STRING)
435 # pragma message("cl2.hpp: __USE_DEV_STRING is no longer supported. Expect compilation errors")
436 #endif
437 
438 /* Detect which version to target */
439 #if !defined(CL_HPP_TARGET_OPENCL_VERSION)
440 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not defined. It will default to 200 (OpenCL 2.0)")
441 # define CL_HPP_TARGET_OPENCL_VERSION 200
442 #endif
443 #if CL_HPP_TARGET_OPENCL_VERSION != 100 && CL_HPP_TARGET_OPENCL_VERSION != 110 && CL_HPP_TARGET_OPENCL_VERSION != 120 && CL_HPP_TARGET_OPENCL_VERSION != 200
444 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120 or 200). It will be set to 200")
445 # undef CL_HPP_TARGET_OPENCL_VERSION
446 # define CL_HPP_TARGET_OPENCL_VERSION 200
447 #endif
448 
449 #if !defined(CL_HPP_MINIMUM_OPENCL_VERSION)
450 # define CL_HPP_MINIMUM_OPENCL_VERSION 200
451 #endif
452 #if CL_HPP_MINIMUM_OPENCL_VERSION != 100 && CL_HPP_MINIMUM_OPENCL_VERSION != 110 && CL_HPP_MINIMUM_OPENCL_VERSION != 120 && CL_HPP_MINIMUM_OPENCL_VERSION != 200
453 # pragma message("cl2.hpp: CL_HPP_MINIMUM_OPENCL_VERSION is not a valid value (100, 110, 120 or 200). It will be set to 100")
454 # undef CL_HPP_MINIMUM_OPENCL_VERSION
455 # define CL_HPP_MINIMUM_OPENCL_VERSION 100
456 #endif
457 #if CL_HPP_MINIMUM_OPENCL_VERSION > CL_HPP_TARGET_OPENCL_VERSION
458 # error "CL_HPP_MINIMUM_OPENCL_VERSION must not be greater than CL_HPP_TARGET_OPENCL_VERSION"
459 #endif
460 
461 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS)
462 # define CL_USE_DEPRECATED_OPENCL_1_0_APIS
463 #endif
464 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
465 # define CL_USE_DEPRECATED_OPENCL_1_1_APIS
466 #endif
467 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
468 # define CL_USE_DEPRECATED_OPENCL_1_2_APIS
469 #endif
470 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS)
471 # define CL_USE_DEPRECATED_OPENCL_2_0_APIS
472 #endif
473 
474 #ifdef _WIN32
475 
476 #include <malloc.h>
477 
478 #if defined(CL_HPP_USE_DX_INTEROP)
479 #include <CL/cl_d3d10.h>
480 #include <CL/cl_dx9_media_sharing.h>
481 #endif
482 #endif // _WIN32
483 
484 #if defined(_MSC_VER)
485 #include <intrin.h>
486 #endif // _MSC_VER
487 
488  // Check for a valid C++ version
489 
490 // Need to do both tests here because for some reason __cplusplus is not
491 // updated in visual studio
492 #if (!defined(_MSC_VER) && __cplusplus < 201103L) || (defined(_MSC_VER) && _MSC_VER < 1700)
493 #error Visual studio 2013 or another C++11-supporting compiler required
494 #endif
495 
496 //
497 #if defined(CL_HPP_USE_CL_DEVICE_FISSION) || defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
498 #include <CL/cl_ext.h>
499 #endif
500 
501 #if defined(__APPLE__) || defined(__MACOSX)
502 #include <OpenCL/opencl.h>
503 #else
504 #include <CL/opencl.h>
505 #endif // !__APPLE__
506 
507 #if (__cplusplus >= 201103L)
508 #define CL_HPP_NOEXCEPT_ noexcept
509 #else
510 #define CL_HPP_NOEXCEPT_
511 #endif
512 
513 #if defined(_MSC_VER)
514 # define CL_HPP_DEFINE_STATIC_MEMBER_ __declspec(selectany)
515 #else
516 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((weak))
517 #endif // !_MSC_VER
518 
519 // Define deprecated prefixes and suffixes to ensure compilation
520 // in case they are not pre-defined
521 #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
522 #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
523 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
524 #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
525 #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
526 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
527 
528 #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
529 #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED
530 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
531 #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
532 #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
533 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
534 
535 #if !defined(CL_CALLBACK)
536 #define CL_CALLBACK
537 #endif //CL_CALLBACK
538 
539 #include <utility>
540 #include <limits>
541 #include <iterator>
542 #include <mutex>
543 #include <cstring>
544 #include <functional>
545 
546 
547 // Define a size_type to represent a correctly resolved size_t
548 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
549 namespace cl {
550  using size_type = ::size_t;
551 } // namespace cl
552 #else // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
553 namespace cl {
554  using size_type = size_t;
555 } // namespace cl
556 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
557 
558 
559 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
560 #include <exception>
561 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
562 
563 #if !defined(CL_HPP_NO_STD_VECTOR)
564 #include <vector>
565 namespace cl {
566  template < class T, class Alloc = std::allocator<T> >
567  using vector = std::vector<T, Alloc>;
568 } // namespace cl
569 #endif // #if !defined(CL_HPP_NO_STD_VECTOR)
570 
571 #if !defined(CL_HPP_NO_STD_STRING)
572 #include <string>
573 namespace cl {
574  using string = std::string;
575 } // namespace cl
576 #endif // #if !defined(CL_HPP_NO_STD_STRING)
577 
578 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
579 
580 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
581 #include <memory>
582 namespace cl {
583  // Replace unique_ptr and allocate_pointer for internal use
584  // to allow user to replace them
585  template<class T, class D>
586  using pointer = std::unique_ptr<T, D>;
587 } // namespace cl
588 #endif
589 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
590 #if !defined(CL_HPP_NO_STD_ARRAY)
591 #include <array>
592 namespace cl {
593  template < class T, size_type N >
594  using array = std::array<T, N>;
595 } // namespace cl
596 #endif // #if !defined(CL_HPP_NO_STD_ARRAY)
597 
598 // Define size_type appropriately to allow backward-compatibility
599 // use of the old size_t interface class
600 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
601 namespace cl {
602  namespace compatibility {
607  template <int N>
608  class size_t
609  {
610  private:
611  size_type data_[N];
612 
613  public:
615  size_t()
616  {
617  for (int i = 0; i < N; ++i) {
618  data_[i] = 0;
619  }
620  }
621 
622  size_t(const array<size_type, N> &rhs)
623  {
624  for (int i = 0; i < N; ++i) {
625  data_[i] = rhs[i];
626  }
627  }
628 
629  size_type& operator[](int index)
630  {
631  return data_[index];
632  }
633 
634  const size_type& operator[](int index) const
635  {
636  return data_[index];
637  }
638 
640  operator size_type* () { return data_; }
641 
643  operator const size_type* () const { return data_; }
644 
645  operator array<size_type, N>() const
646  {
647  array<size_type, N> ret;
648 
649  for (int i = 0; i < N; ++i) {
650  ret[i] = data_[i];
651  }
652  return ret;
653  }
654  };
655  } // namespace compatibility
656 
657  template<int N>
658  using size_t = compatibility::size_t<N>;
659 } // namespace cl
660 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
661 
662 // Helper alias to avoid confusing the macros
663 namespace cl {
664  namespace detail {
665  using size_t_array = array<size_type, 3>;
666  } // namespace detail
667 } // namespace cl
668 
669 
675 namespace cl {
676  class Memory;
677 
678 #define CL_HPP_INIT_CL_EXT_FCN_PTR_(name) \
679  if (!pfn_##name) { \
680  pfn_##name = (PFN_##name) \
681  clGetExtensionFunctionAddress(#name); \
682  if (!pfn_##name) { \
683  } \
684  }
685 
686 #define CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, name) \
687  if (!pfn_##name) { \
688  pfn_##name = (PFN_##name) \
689  clGetExtensionFunctionAddressForPlatform(platform, #name); \
690  if (!pfn_##name) { \
691  } \
692  }
693 
694  class Program;
695  class Device;
696  class Context;
697  class CommandQueue;
698  class DeviceCommandQueue;
699  class Memory;
700  class Buffer;
701  class Pipe;
702 
703 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
704 
708  class Error : public std::exception
709  {
710  private:
711  cl_int err_;
712  const char * errStr_;
713  public:
723  Error(cl_int err, const char * errStr = NULL) : err_(err), errStr_(errStr)
724  {}
725 
726  ~Error() throw() {}
727 
732  virtual const char * what() const throw ()
733  {
734  if (errStr_ == NULL) {
735  return "empty";
736  }
737  else {
738  return errStr_;
739  }
740  }
741 
746  cl_int err(void) const { return err_; }
747  };
748 #define CL_HPP_ERR_STR_(x) #x
749 #else
750 #define CL_HPP_ERR_STR_(x) NULL
751 #endif // CL_HPP_ENABLE_EXCEPTIONS
752 
753 
754 namespace detail
755 {
756 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
757 static inline cl_int errHandler (
758  cl_int err,
759  const char * errStr = NULL)
760 {
761  if (err != CL_SUCCESS) {
762  throw Error(err, errStr);
763  }
764  return err;
765 }
766 #else
767 static inline cl_int errHandler (cl_int err, const char * errStr = NULL)
768 {
769  (void) errStr; // suppress unused variable warning
770  return err;
771 }
772 #endif // CL_HPP_ENABLE_EXCEPTIONS
773 }
774 
775 
776 
778 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
779 #define __GET_DEVICE_INFO_ERR CL_HPP_ERR_STR_(clGetDeviceInfo)
780 #define __GET_PLATFORM_INFO_ERR CL_HPP_ERR_STR_(clGetPlatformInfo)
781 #define __GET_DEVICE_IDS_ERR CL_HPP_ERR_STR_(clGetDeviceIDs)
782 #define __GET_PLATFORM_IDS_ERR CL_HPP_ERR_STR_(clGetPlatformIDs)
783 #define __GET_CONTEXT_INFO_ERR CL_HPP_ERR_STR_(clGetContextInfo)
784 #define __GET_EVENT_INFO_ERR CL_HPP_ERR_STR_(clGetEventInfo)
785 #define __GET_EVENT_PROFILE_INFO_ERR CL_HPP_ERR_STR_(clGetEventProfileInfo)
786 #define __GET_MEM_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetMemObjectInfo)
787 #define __GET_IMAGE_INFO_ERR CL_HPP_ERR_STR_(clGetImageInfo)
788 #define __GET_SAMPLER_INFO_ERR CL_HPP_ERR_STR_(clGetSamplerInfo)
789 #define __GET_KERNEL_INFO_ERR CL_HPP_ERR_STR_(clGetKernelInfo)
790 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
791 #define __GET_KERNEL_ARG_INFO_ERR CL_HPP_ERR_STR_(clGetKernelArgInfo)
792 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
793 #define __GET_KERNEL_WORK_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelWorkGroupInfo)
794 #define __GET_PROGRAM_INFO_ERR CL_HPP_ERR_STR_(clGetProgramInfo)
795 #define __GET_PROGRAM_BUILD_INFO_ERR CL_HPP_ERR_STR_(clGetProgramBuildInfo)
796 #define __GET_COMMAND_QUEUE_INFO_ERR CL_HPP_ERR_STR_(clGetCommandQueueInfo)
797 
798 #define __CREATE_CONTEXT_ERR CL_HPP_ERR_STR_(clCreateContext)
799 #define __CREATE_CONTEXT_FROM_TYPE_ERR CL_HPP_ERR_STR_(clCreateContextFromType)
800 #define __GET_SUPPORTED_IMAGE_FORMATS_ERR CL_HPP_ERR_STR_(clGetSupportedImageFormats)
801 
802 #define __CREATE_BUFFER_ERR CL_HPP_ERR_STR_(clCreateBuffer)
803 #define __COPY_ERR CL_HPP_ERR_STR_(cl::copy)
804 #define __CREATE_SUBBUFFER_ERR CL_HPP_ERR_STR_(clCreateSubBuffer)
805 #define __CREATE_GL_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
806 #define __CREATE_GL_RENDER_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
807 #define __GET_GL_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetGLObjectInfo)
808 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
809 #define __CREATE_IMAGE_ERR CL_HPP_ERR_STR_(clCreateImage)
810 #define __CREATE_GL_TEXTURE_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture)
811 #define __IMAGE_DIMENSION_ERR CL_HPP_ERR_STR_(Incorrect image dimensions)
812 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
813 #define __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR CL_HPP_ERR_STR_(clSetMemObjectDestructorCallback)
814 
815 #define __CREATE_USER_EVENT_ERR CL_HPP_ERR_STR_(clCreateUserEvent)
816 #define __SET_USER_EVENT_STATUS_ERR CL_HPP_ERR_STR_(clSetUserEventStatus)
817 #define __SET_EVENT_CALLBACK_ERR CL_HPP_ERR_STR_(clSetEventCallback)
818 #define __WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clWaitForEvents)
819 
820 #define __CREATE_KERNEL_ERR CL_HPP_ERR_STR_(clCreateKernel)
821 #define __SET_KERNEL_ARGS_ERR CL_HPP_ERR_STR_(clSetKernelArg)
822 #define __CREATE_PROGRAM_WITH_SOURCE_ERR CL_HPP_ERR_STR_(clCreateProgramWithSource)
823 #define __CREATE_PROGRAM_WITH_BINARY_ERR CL_HPP_ERR_STR_(clCreateProgramWithBinary)
824 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
825 #define __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR CL_HPP_ERR_STR_(clCreateProgramWithBuiltInKernels)
826 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
827 #define __BUILD_PROGRAM_ERR CL_HPP_ERR_STR_(clBuildProgram)
828 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
829 #define __COMPILE_PROGRAM_ERR CL_HPP_ERR_STR_(clCompileProgram)
830 #define __LINK_PROGRAM_ERR CL_HPP_ERR_STR_(clLinkProgram)
831 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
832 #define __CREATE_KERNELS_IN_PROGRAM_ERR CL_HPP_ERR_STR_(clCreateKernelsInProgram)
833 
834 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
835 #define __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateCommandQueueWithProperties)
836 #define __CREATE_SAMPLER_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateSamplerWithProperties)
837 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
838 #define __SET_COMMAND_QUEUE_PROPERTY_ERR CL_HPP_ERR_STR_(clSetCommandQueueProperty)
839 #define __ENQUEUE_READ_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueReadBuffer)
840 #define __ENQUEUE_READ_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueReadBufferRect)
841 #define __ENQUEUE_WRITE_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueWriteBuffer)
842 #define __ENQUEUE_WRITE_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueWriteBufferRect)
843 #define __ENQEUE_COPY_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyBuffer)
844 #define __ENQEUE_COPY_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferRect)
845 #define __ENQUEUE_FILL_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueFillBuffer)
846 #define __ENQUEUE_READ_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueReadImage)
847 #define __ENQUEUE_WRITE_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueWriteImage)
848 #define __ENQUEUE_COPY_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyImage)
849 #define __ENQUEUE_FILL_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueFillImage)
850 #define __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyImageToBuffer)
851 #define __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferToImage)
852 #define __ENQUEUE_MAP_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueMapBuffer)
853 #define __ENQUEUE_MAP_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueMapImage)
854 #define __ENQUEUE_UNMAP_MEM_OBJECT_ERR CL_HPP_ERR_STR_(clEnqueueUnMapMemObject)
855 #define __ENQUEUE_NDRANGE_KERNEL_ERR CL_HPP_ERR_STR_(clEnqueueNDRangeKernel)
856 #define __ENQUEUE_NATIVE_KERNEL CL_HPP_ERR_STR_(clEnqueueNativeKernel)
857 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
858 #define __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR CL_HPP_ERR_STR_(clEnqueueMigrateMemObjects)
859 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
860 
861 #define __ENQUEUE_ACQUIRE_GL_ERR CL_HPP_ERR_STR_(clEnqueueAcquireGLObjects)
862 #define __ENQUEUE_RELEASE_GL_ERR CL_HPP_ERR_STR_(clEnqueueReleaseGLObjects)
863 
864 #define __CREATE_PIPE_ERR CL_HPP_ERR_STR_(clCreatePipe)
865 #define __GET_PIPE_INFO_ERR CL_HPP_ERR_STR_(clGetPipeInfo)
866 
867 
868 #define __RETAIN_ERR CL_HPP_ERR_STR_(Retain Object)
869 #define __RELEASE_ERR CL_HPP_ERR_STR_(Release Object)
870 #define __FLUSH_ERR CL_HPP_ERR_STR_(clFlush)
871 #define __FINISH_ERR CL_HPP_ERR_STR_(clFinish)
872 #define __VECTOR_CAPACITY_ERR CL_HPP_ERR_STR_(Vector capacity error)
873 
877 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
878 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevices)
879 #else
880 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevicesEXT)
881 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
882 
886 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
887 #define __ENQUEUE_MARKER_ERR CL_HPP_ERR_STR_(clEnqueueMarker)
888 #define __ENQUEUE_WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clEnqueueWaitForEvents)
889 #define __ENQUEUE_BARRIER_ERR CL_HPP_ERR_STR_(clEnqueueBarrier)
890 #define __UNLOAD_COMPILER_ERR CL_HPP_ERR_STR_(clUnloadCompiler)
891 #define __CREATE_GL_TEXTURE_2D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture2D)
892 #define __CREATE_GL_TEXTURE_3D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture3D)
893 #define __CREATE_IMAGE2D_ERR CL_HPP_ERR_STR_(clCreateImage2D)
894 #define __CREATE_IMAGE3D_ERR CL_HPP_ERR_STR_(clCreateImage3D)
895 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
896 
900 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
901 #define __CREATE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clCreateCommandQueue)
902 #define __ENQUEUE_TASK_ERR CL_HPP_ERR_STR_(clEnqueueTask)
903 #define __CREATE_SAMPLER_ERR CL_HPP_ERR_STR_(clCreateSampler)
904 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
905 
909 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
910 #define __ENQUEUE_MARKER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueMarkerWithWaitList)
911 #define __ENQUEUE_BARRIER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueBarrierWithWaitList)
912 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
913 
914 #endif // CL_HPP_USER_OVERRIDE_ERROR_STRINGS
915 
917 
918 namespace detail {
919 
920 // Generic getInfoHelper. The final parameter is used to guide overload
921 // resolution: the actual parameter passed is an int, which makes this
922 // a worse conversion sequence than a specialization that declares the
923 // parameter as an int.
924 template<typename Functor, typename T>
925 inline cl_int getInfoHelper(Functor f, cl_uint name, T* param, long)
926 {
927  return f(name, sizeof(T), param, NULL);
928 }
929 
930 // Specialized for getInfo<CL_PROGRAM_BINARIES>
931 // Assumes that the output vector was correctly resized on the way in
932 template <typename Func>
933 inline cl_int getInfoHelper(Func f, cl_uint name, vector<vector<unsigned char>>* param, int)
934 {
935  if (name != CL_PROGRAM_BINARIES) {
936  return CL_INVALID_VALUE;
937  }
938  if (param) {
939  // Create array of pointers, calculate total size and pass pointer array in
940  size_type numBinaries = param->size();
941  vector<unsigned char*> binariesPointers(numBinaries);
942 
943  for (size_type i = 0; i < numBinaries; ++i)
944  {
945  binariesPointers[i] = (*param)[i].data();
946  }
947 
948  cl_int err = f(name, numBinaries * sizeof(unsigned char*), binariesPointers.data(), NULL);
949 
950  if (err != CL_SUCCESS) {
951  return err;
952  }
953  }
954 
955 
956  return CL_SUCCESS;
957 }
958 
959 // Specialized getInfoHelper for vector params
960 template <typename Func, typename T>
961 inline cl_int getInfoHelper(Func f, cl_uint name, vector<T>* param, long)
962 {
963  size_type required;
964  cl_int err = f(name, 0, NULL, &required);
965  if (err != CL_SUCCESS) {
966  return err;
967  }
968  const size_type elements = required / sizeof(T);
969 
970  // Temporary to avoid changing param on an error
971  vector<T> localData(elements);
972  err = f(name, required, localData.data(), NULL);
973  if (err != CL_SUCCESS) {
974  return err;
975  }
976  if (param) {
977  *param = std::move(localData);
978  }
979 
980  return CL_SUCCESS;
981 }
982 
983 /* Specialization for reference-counted types. This depends on the
984  * existence of Wrapper<T>::cl_type, and none of the other types having the
985  * cl_type member. Note that simplify specifying the parameter as Wrapper<T>
986  * does not work, because when using a derived type (e.g. Context) the generic
987  * template will provide a better match.
988  */
989 template <typename Func, typename T>
990 inline cl_int getInfoHelper(
991  Func f, cl_uint name, vector<T>* param, int, typename T::cl_type = 0)
992 {
993  size_type required;
994  cl_int err = f(name, 0, NULL, &required);
995  if (err != CL_SUCCESS) {
996  return err;
997  }
998 
999  const size_type elements = required / sizeof(typename T::cl_type);
1000 
1001  vector<typename T::cl_type> value(elements);
1002  err = f(name, required, value.data(), NULL);
1003  if (err != CL_SUCCESS) {
1004  return err;
1005  }
1006 
1007  if (param) {
1008  // Assign to convert CL type to T for each element
1009  param->resize(elements);
1010 
1011  // Assign to param, constructing with retain behaviour
1012  // to correctly capture each underlying CL object
1013  for (size_type i = 0; i < elements; i++) {
1014  (*param)[i] = T(value[i], true);
1015  }
1016  }
1017  return CL_SUCCESS;
1018 }
1019 
1020 // Specialized GetInfoHelper for string params
1021 template <typename Func>
1022 inline cl_int getInfoHelper(Func f, cl_uint name, string* param, long)
1023 {
1024  size_type required;
1025  cl_int err = f(name, 0, NULL, &required);
1026  if (err != CL_SUCCESS) {
1027  return err;
1028  }
1029 
1030  // std::string has a constant data member
1031  // a char vector does not
1032  if (required > 0) {
1033  vector<char> value(required);
1034  err = f(name, required, value.data(), NULL);
1035  if (err != CL_SUCCESS) {
1036  return err;
1037  }
1038  if (param) {
1039  param->assign(begin(value), prev(end(value)));
1040  }
1041  }
1042  else if (param) {
1043  param->assign("");
1044  }
1045  return CL_SUCCESS;
1046 }
1047 
1048 // Specialized GetInfoHelper for clsize_t params
1049 template <typename Func, size_type N>
1050 inline cl_int getInfoHelper(Func f, cl_uint name, array<size_type, N>* param, long)
1051 {
1052  size_type required;
1053  cl_int err = f(name, 0, NULL, &required);
1054  if (err != CL_SUCCESS) {
1055  return err;
1056  }
1057 
1058  size_type elements = required / sizeof(size_type);
1059  vector<size_type> value(elements, 0);
1060 
1061  err = f(name, required, value.data(), NULL);
1062  if (err != CL_SUCCESS) {
1063  return err;
1064  }
1065 
1066  // Bound the copy with N to prevent overruns
1067  // if passed N > than the amount copied
1068  if (elements > N) {
1069  elements = N;
1070  }
1071  for (size_type i = 0; i < elements; ++i) {
1072  (*param)[i] = value[i];
1073  }
1074 
1075  return CL_SUCCESS;
1076 }
1077 
1078 template<typename T> struct ReferenceHandler;
1079 
1080 /* Specialization for reference-counted types. This depends on the
1081  * existence of Wrapper<T>::cl_type, and none of the other types having the
1082  * cl_type member. Note that simplify specifying the parameter as Wrapper<T>
1083  * does not work, because when using a derived type (e.g. Context) the generic
1084  * template will provide a better match.
1085  */
1086 template<typename Func, typename T>
1087 inline cl_int getInfoHelper(Func f, cl_uint name, T* param, int, typename T::cl_type = 0)
1088 {
1089  typename T::cl_type value;
1090  cl_int err = f(name, sizeof(value), &value, NULL);
1091  if (err != CL_SUCCESS) {
1092  return err;
1093  }
1094  *param = value;
1095  if (value != NULL)
1096  {
1097  err = param->retain();
1098  if (err != CL_SUCCESS) {
1099  return err;
1100  }
1101  }
1102  return CL_SUCCESS;
1103 }
1104 
1105 #define CL_HPP_PARAM_NAME_INFO_1_0_(F) \
1106  F(cl_platform_info, CL_PLATFORM_PROFILE, string) \
1107  F(cl_platform_info, CL_PLATFORM_VERSION, string) \
1108  F(cl_platform_info, CL_PLATFORM_NAME, string) \
1109  F(cl_platform_info, CL_PLATFORM_VENDOR, string) \
1110  F(cl_platform_info, CL_PLATFORM_EXTENSIONS, string) \
1111  \
1112  F(cl_device_info, CL_DEVICE_TYPE, cl_device_type) \
1113  F(cl_device_info, CL_DEVICE_VENDOR_ID, cl_uint) \
1114  F(cl_device_info, CL_DEVICE_MAX_COMPUTE_UNITS, cl_uint) \
1115  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS, cl_uint) \
1116  F(cl_device_info, CL_DEVICE_MAX_WORK_GROUP_SIZE, size_type) \
1117  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_SIZES, cl::vector<size_type>) \
1118  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR, cl_uint) \
1119  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT, cl_uint) \
1120  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT, cl_uint) \
1121  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG, cl_uint) \
1122  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT, cl_uint) \
1123  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE, cl_uint) \
1124  F(cl_device_info, CL_DEVICE_MAX_CLOCK_FREQUENCY, cl_uint) \
1125  F(cl_device_info, CL_DEVICE_ADDRESS_BITS, cl_uint) \
1126  F(cl_device_info, CL_DEVICE_MAX_READ_IMAGE_ARGS, cl_uint) \
1127  F(cl_device_info, CL_DEVICE_MAX_WRITE_IMAGE_ARGS, cl_uint) \
1128  F(cl_device_info, CL_DEVICE_MAX_MEM_ALLOC_SIZE, cl_ulong) \
1129  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_WIDTH, size_type) \
1130  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_HEIGHT, size_type) \
1131  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_WIDTH, size_type) \
1132  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_HEIGHT, size_type) \
1133  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_DEPTH, size_type) \
1134  F(cl_device_info, CL_DEVICE_IMAGE_SUPPORT, cl_bool) \
1135  F(cl_device_info, CL_DEVICE_MAX_PARAMETER_SIZE, size_type) \
1136  F(cl_device_info, CL_DEVICE_MAX_SAMPLERS, cl_uint) \
1137  F(cl_device_info, CL_DEVICE_MEM_BASE_ADDR_ALIGN, cl_uint) \
1138  F(cl_device_info, CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE, cl_uint) \
1139  F(cl_device_info, CL_DEVICE_SINGLE_FP_CONFIG, cl_device_fp_config) \
1140  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_TYPE, cl_device_mem_cache_type) \
1141  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE, cl_uint)\
1142  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_SIZE, cl_ulong) \
1143  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_SIZE, cl_ulong) \
1144  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE, cl_ulong) \
1145  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_ARGS, cl_uint) \
1146  F(cl_device_info, CL_DEVICE_LOCAL_MEM_TYPE, cl_device_local_mem_type) \
1147  F(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE, cl_ulong) \
1148  F(cl_device_info, CL_DEVICE_ERROR_CORRECTION_SUPPORT, cl_bool) \
1149  F(cl_device_info, CL_DEVICE_PROFILING_TIMER_RESOLUTION, size_type) \
1150  F(cl_device_info, CL_DEVICE_ENDIAN_LITTLE, cl_bool) \
1151  F(cl_device_info, CL_DEVICE_AVAILABLE, cl_bool) \
1152  F(cl_device_info, CL_DEVICE_COMPILER_AVAILABLE, cl_bool) \
1153  F(cl_device_info, CL_DEVICE_EXECUTION_CAPABILITIES, cl_device_exec_capabilities) \
1154  F(cl_device_info, CL_DEVICE_PLATFORM, cl_platform_id) \
1155  F(cl_device_info, CL_DEVICE_NAME, string) \
1156  F(cl_device_info, CL_DEVICE_VENDOR, string) \
1157  F(cl_device_info, CL_DRIVER_VERSION, string) \
1158  F(cl_device_info, CL_DEVICE_PROFILE, string) \
1159  F(cl_device_info, CL_DEVICE_VERSION, string) \
1160  F(cl_device_info, CL_DEVICE_EXTENSIONS, string) \
1161  \
1162  F(cl_context_info, CL_CONTEXT_REFERENCE_COUNT, cl_uint) \
1163  F(cl_context_info, CL_CONTEXT_DEVICES, cl::vector<Device>) \
1164  F(cl_context_info, CL_CONTEXT_PROPERTIES, cl::vector<cl_context_properties>) \
1165  \
1166  F(cl_event_info, CL_EVENT_COMMAND_QUEUE, cl::CommandQueue) \
1167  F(cl_event_info, CL_EVENT_COMMAND_TYPE, cl_command_type) \
1168  F(cl_event_info, CL_EVENT_REFERENCE_COUNT, cl_uint) \
1169  F(cl_event_info, CL_EVENT_COMMAND_EXECUTION_STATUS, cl_int) \
1170  \
1171  F(cl_profiling_info, CL_PROFILING_COMMAND_QUEUED, cl_ulong) \
1172  F(cl_profiling_info, CL_PROFILING_COMMAND_SUBMIT, cl_ulong) \
1173  F(cl_profiling_info, CL_PROFILING_COMMAND_START, cl_ulong) \
1174  F(cl_profiling_info, CL_PROFILING_COMMAND_END, cl_ulong) \
1175  \
1176  F(cl_mem_info, CL_MEM_TYPE, cl_mem_object_type) \
1177  F(cl_mem_info, CL_MEM_FLAGS, cl_mem_flags) \
1178  F(cl_mem_info, CL_MEM_SIZE, size_type) \
1179  F(cl_mem_info, CL_MEM_HOST_PTR, void*) \
1180  F(cl_mem_info, CL_MEM_MAP_COUNT, cl_uint) \
1181  F(cl_mem_info, CL_MEM_REFERENCE_COUNT, cl_uint) \
1182  F(cl_mem_info, CL_MEM_CONTEXT, cl::Context) \
1183  \
1184  F(cl_image_info, CL_IMAGE_FORMAT, cl_image_format) \
1185  F(cl_image_info, CL_IMAGE_ELEMENT_SIZE, size_type) \
1186  F(cl_image_info, CL_IMAGE_ROW_PITCH, size_type) \
1187  F(cl_image_info, CL_IMAGE_SLICE_PITCH, size_type) \
1188  F(cl_image_info, CL_IMAGE_WIDTH, size_type) \
1189  F(cl_image_info, CL_IMAGE_HEIGHT, size_type) \
1190  F(cl_image_info, CL_IMAGE_DEPTH, size_type) \
1191  \
1192  F(cl_sampler_info, CL_SAMPLER_REFERENCE_COUNT, cl_uint) \
1193  F(cl_sampler_info, CL_SAMPLER_CONTEXT, cl::Context) \
1194  F(cl_sampler_info, CL_SAMPLER_NORMALIZED_COORDS, cl_bool) \
1195  F(cl_sampler_info, CL_SAMPLER_ADDRESSING_MODE, cl_addressing_mode) \
1196  F(cl_sampler_info, CL_SAMPLER_FILTER_MODE, cl_filter_mode) \
1197  \
1198  F(cl_program_info, CL_PROGRAM_REFERENCE_COUNT, cl_uint) \
1199  F(cl_program_info, CL_PROGRAM_CONTEXT, cl::Context) \
1200  F(cl_program_info, CL_PROGRAM_NUM_DEVICES, cl_uint) \
1201  F(cl_program_info, CL_PROGRAM_DEVICES, cl::vector<Device>) \
1202  F(cl_program_info, CL_PROGRAM_SOURCE, string) \
1203  F(cl_program_info, CL_PROGRAM_BINARY_SIZES, cl::vector<size_type>) \
1204  F(cl_program_info, CL_PROGRAM_BINARIES, cl::vector<cl::vector<unsigned char>>) \
1205  \
1206  F(cl_program_build_info, CL_PROGRAM_BUILD_STATUS, cl_build_status) \
1207  F(cl_program_build_info, CL_PROGRAM_BUILD_OPTIONS, string) \
1208  F(cl_program_build_info, CL_PROGRAM_BUILD_LOG, string) \
1209  \
1210  F(cl_kernel_info, CL_KERNEL_FUNCTION_NAME, string) \
1211  F(cl_kernel_info, CL_KERNEL_NUM_ARGS, cl_uint) \
1212  F(cl_kernel_info, CL_KERNEL_REFERENCE_COUNT, cl_uint) \
1213  F(cl_kernel_info, CL_KERNEL_CONTEXT, cl::Context) \
1214  F(cl_kernel_info, CL_KERNEL_PROGRAM, cl::Program) \
1215  \
1216  F(cl_kernel_work_group_info, CL_KERNEL_WORK_GROUP_SIZE, size_type) \
1217  F(cl_kernel_work_group_info, CL_KERNEL_COMPILE_WORK_GROUP_SIZE, cl::detail::size_t_array) \
1218  F(cl_kernel_work_group_info, CL_KERNEL_LOCAL_MEM_SIZE, cl_ulong) \
1219  \
1220  F(cl_command_queue_info, CL_QUEUE_CONTEXT, cl::Context) \
1221  F(cl_command_queue_info, CL_QUEUE_DEVICE, cl::Device) \
1222  F(cl_command_queue_info, CL_QUEUE_REFERENCE_COUNT, cl_uint) \
1223  F(cl_command_queue_info, CL_QUEUE_PROPERTIES, cl_command_queue_properties)
1224 
1225 
// Query parameters added in OpenCL 1.1, each paired with the result type
// that clGet*Info returns for it (consumed via CL_HPP_DECLARE_PARAM_TRAITS_).
#define CL_HPP_PARAM_NAME_INFO_1_1_(F) \
    F(cl_context_info, CL_CONTEXT_NUM_DEVICES, cl_uint)\
    F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF, cl_uint) \
    F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR, cl_uint) \
    F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT, cl_uint) \
    F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_INT, cl_uint) \
    F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG, cl_uint) \
    F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT, cl_uint) \
    F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE, cl_uint) \
    F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF, cl_uint) \
    F(cl_device_info, CL_DEVICE_DOUBLE_FP_CONFIG, cl_device_fp_config) \
    F(cl_device_info, CL_DEVICE_HALF_FP_CONFIG, cl_device_fp_config) \
    F(cl_device_info, CL_DEVICE_OPENCL_C_VERSION, string) \
    \
    F(cl_mem_info, CL_MEM_ASSOCIATED_MEMOBJECT, cl::Memory) \
    F(cl_mem_info, CL_MEM_OFFSET, size_type) \
    \
    F(cl_kernel_work_group_info, CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE, size_type) \
    F(cl_kernel_work_group_info, CL_KERNEL_PRIVATE_MEM_SIZE, cl_ulong) \
    \
    F(cl_event_info, CL_EVENT_CONTEXT, cl::Context)
1247 
// Query parameters added in OpenCL 1.2 and the result type clGet*Info
// returns for each (consumed via CL_HPP_DECLARE_PARAM_TRAITS_).
// Fix: CL_DEVICE_PREFERRED_INTEROP_USER_SYNC is defined by the OpenCL 1.2
// specification to return cl_bool, not size_type; querying it as the wider
// size_type left the upper bytes of the result uninitialized.
#define CL_HPP_PARAM_NAME_INFO_1_2_(F) \
    F(cl_program_info, CL_PROGRAM_NUM_KERNELS, size_type) \
    F(cl_program_info, CL_PROGRAM_KERNEL_NAMES, string) \
    \
    F(cl_program_build_info, CL_PROGRAM_BINARY_TYPE, cl_program_binary_type) \
    \
    F(cl_kernel_info, CL_KERNEL_ATTRIBUTES, string) \
    \
    F(cl_kernel_arg_info, CL_KERNEL_ARG_ADDRESS_QUALIFIER, cl_kernel_arg_address_qualifier) \
    F(cl_kernel_arg_info, CL_KERNEL_ARG_ACCESS_QUALIFIER, cl_kernel_arg_access_qualifier) \
    F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_NAME, string) \
    F(cl_kernel_arg_info, CL_KERNEL_ARG_NAME, string) \
    F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_QUALIFIER, cl_kernel_arg_type_qualifier) \
    \
    F(cl_device_info, CL_DEVICE_PARENT_DEVICE, cl::Device) \
    F(cl_device_info, CL_DEVICE_PARTITION_PROPERTIES, cl::vector<cl_device_partition_property>) \
    F(cl_device_info, CL_DEVICE_PARTITION_TYPE, cl::vector<cl_device_partition_property>) \
    F(cl_device_info, CL_DEVICE_REFERENCE_COUNT, cl_uint) \
    F(cl_device_info, CL_DEVICE_PREFERRED_INTEROP_USER_SYNC, cl_bool) \
    F(cl_device_info, CL_DEVICE_PARTITION_AFFINITY_DOMAIN, cl_device_affinity_domain) \
    F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS, string) \
    \
    F(cl_image_info, CL_IMAGE_ARRAY_SIZE, size_type) \
    F(cl_image_info, CL_IMAGE_NUM_MIP_LEVELS, cl_uint) \
    F(cl_image_info, CL_IMAGE_NUM_SAMPLES, cl_uint)
1273 
// Query parameters added in OpenCL 2.0 (device-side queues, pipes, SVM)
// and the result type clGet*Info returns for each.
#define CL_HPP_PARAM_NAME_INFO_2_0_(F) \
    F(cl_device_info, CL_DEVICE_QUEUE_ON_HOST_PROPERTIES, cl_command_queue_properties) \
    F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES, cl_command_queue_properties) \
    F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE, cl_uint) \
    F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE, cl_uint) \
    F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_QUEUES, cl_uint) \
    F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_EVENTS, cl_uint) \
    F(cl_device_info, CL_DEVICE_MAX_PIPE_ARGS, cl_uint) \
    F(cl_device_info, CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS, cl_uint) \
    F(cl_device_info, CL_DEVICE_PIPE_MAX_PACKET_SIZE, cl_uint) \
    F(cl_device_info, CL_DEVICE_SVM_CAPABILITIES, cl_device_svm_capabilities) \
    F(cl_device_info, CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT, cl_uint) \
    F(cl_device_info, CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT, cl_uint) \
    F(cl_device_info, CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT, cl_uint) \
    F(cl_command_queue_info, CL_QUEUE_SIZE, cl_uint) \
    F(cl_mem_info, CL_MEM_USES_SVM_POINTER, cl_bool) \
    F(cl_program_build_info, CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE, size_type) \
    F(cl_pipe_info, CL_PIPE_PACKET_SIZE, cl_uint) \
    F(cl_pipe_info, CL_PIPE_MAX_PACKETS, cl_uint)
1293 
// Query parameters provided by the cl_ext device-fission extension
// (pre-1.2 sub-device support).
#define CL_HPP_PARAM_NAME_DEVICE_FISSION_(F) \
    F(cl_device_info, CL_DEVICE_PARENT_DEVICE_EXT, cl_device_id) \
    F(cl_device_info, CL_DEVICE_PARTITION_TYPES_EXT, cl::vector<cl_device_partition_property_ext>) \
    F(cl_device_info, CL_DEVICE_AFFINITY_DOMAINS_EXT, cl::vector<cl_device_partition_property_ext>) \
    F(cl_device_info, CL_DEVICE_REFERENCE_COUNT_EXT , cl_uint) \
    F(cl_device_info, CL_DEVICE_PARTITION_STYLE_EXT, cl::vector<cl_device_partition_property_ext>)
1300 
// Primary param_traits template. Specializations (generated by
// CL_HPP_DECLARE_PARAM_TRAITS_) map each query enum value to the
// C++ type its result should be read as.
template <typename enum_type, cl_int Name>
struct param_traits {};
1303 
// Declares a param_traits specialization binding query `param_name`, of
// enum category `token` (forward-declared as a tag type), to result type T.
#define CL_HPP_DECLARE_PARAM_TRAITS_(token, param_name, T) \
struct token; \
template<> \
struct param_traits<detail:: token,param_name> \
{ \
    enum { value = param_name }; \
    typedef T param_type; \
};
1312 
// Instantiate param_traits specializations for every query supported by the
// targeted OpenCL version. (The trailing #endif comments previously said
// ">= 110" for all three guards; corrected to match each condition.)
CL_HPP_PARAM_NAME_INFO_1_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
#if CL_HPP_TARGET_OPENCL_VERSION >= 110
CL_HPP_PARAM_NAME_INFO_1_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
#if CL_HPP_TARGET_OPENCL_VERSION >= 120
CL_HPP_PARAM_NAME_INFO_1_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
#if CL_HPP_TARGET_OPENCL_VERSION >= 200
CL_HPP_PARAM_NAME_INFO_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
1323 
1324 
// Flags deprecated in OpenCL 2.0; declared separately so they can be
// conditionally included only for pre-2.0 targets (see below).
#define CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(F) \
    F(cl_device_info, CL_DEVICE_QUEUE_PROPERTIES, cl_command_queue_properties)

#define CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(F) \
    F(cl_device_info, CL_DEVICE_HOST_UNIFIED_MEMORY, cl_bool)

#define CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(F) \
    F(cl_image_info, CL_IMAGE_BUFFER, cl::Buffer)
1334 
// Include deprecated query flags based on versions
// Only include deprecated 1.0 flags if 2.0 not active as there is an enum clash
// (The trailing #endif comments previously named the wrong conditions;
// corrected to match their #if lines.)
#if CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
#endif // CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
#if CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
#endif // CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
#if CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
#endif // CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1346 
#if defined(CL_HPP_USE_CL_DEVICE_FISSION)
CL_HPP_PARAM_NAME_DEVICE_FISSION_(CL_HPP_DECLARE_PARAM_TRAITS_);
#endif // CL_HPP_USE_CL_DEVICE_FISSION

// Optional extension queries: each trait is declared only when the
// corresponding token is defined by the OpenCL headers in use.
#ifdef CL_PLATFORM_ICD_SUFFIX_KHR
CL_HPP_DECLARE_PARAM_TRAITS_(cl_platform_info, CL_PLATFORM_ICD_SUFFIX_KHR, string)
#endif

// AMD vendor extension queries.
#ifdef CL_DEVICE_PROFILING_TIMER_OFFSET_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_PROFILING_TIMER_OFFSET_AMD, cl_ulong)
#endif

#ifdef CL_DEVICE_GLOBAL_FREE_MEMORY_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_FREE_MEMORY_AMD, vector<size_type>)
#endif
#ifdef CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_SIMD_WIDTH_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_WIDTH_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_WAVEFRONT_WIDTH_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WAVEFRONT_WIDTH_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD, cl_uint)
#endif
#ifdef CL_DEVICE_LOCAL_MEM_BANKS_AMD
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_BANKS_AMD, cl_uint)
#endif

// NVIDIA vendor extension queries.
#ifdef CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV, cl_uint)
#endif
#ifdef CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV, cl_uint)
#endif
#ifdef CL_DEVICE_REGISTERS_PER_BLOCK_NV
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_REGISTERS_PER_BLOCK_NV, cl_uint)
#endif
#ifdef CL_DEVICE_WARP_SIZE_NV
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WARP_SIZE_NV, cl_uint)
#endif
#ifdef CL_DEVICE_GPU_OVERLAP_NV
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GPU_OVERLAP_NV, cl_bool)
#endif
#ifdef CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV, cl_bool)
#endif
#ifdef CL_DEVICE_INTEGRATED_MEMORY_NV
CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_INTEGRATED_MEMORY_NV, cl_bool)
#endif
1411 
1412 // Convenience functions
1413 
1414 template <typename Func, typename T>
1415 inline cl_int
1416 getInfo(Func f, cl_uint name, T* param)
1417 {
1418  return getInfoHelper(f, name, param, 0);
1419 }
1420 
1421 template <typename Func, typename Arg0>
1423 {
1424  Func f_; const Arg0& arg0_;
1425  cl_int operator ()(
1426  cl_uint param, size_type size, void* value, size_type* size_ret)
1427  { return f_(arg0_, param, size, value, size_ret); }
1428 };
1429 
1430 template <typename Func, typename Arg0, typename Arg1>
1432 {
1433  Func f_; const Arg0& arg0_; const Arg1& arg1_;
1434  cl_int operator ()(
1435  cl_uint param, size_type size, void* value, size_type* size_ret)
1436  { return f_(arg0_, arg1_, param, size, value, size_ret); }
1437 };
1438 
1439 template <typename Func, typename Arg0, typename T>
1440 inline cl_int
1441 getInfo(Func f, const Arg0& arg0, cl_uint name, T* param)
1442 {
1443  GetInfoFunctor0<Func, Arg0> f0 = { f, arg0 };
1444  return getInfoHelper(f0, name, param, 0);
1445 }
1446 
1447 template <typename Func, typename Arg0, typename Arg1, typename T>
1448 inline cl_int
1449 getInfo(Func f, const Arg0& arg0, const Arg1& arg1, cl_uint name, T* param)
1450 {
1451  GetInfoFunctor1<Func, Arg0, Arg1> f0 = { f, arg0, arg1 };
1452  return getInfoHelper(f0, name, param, 0);
1453 }
1454 
1455 
// Primary template: specialized per OpenCL handle type to map retain()/
// release() onto the matching clRetain*/clRelease* entry points (or to
// no-ops for handle types that are not reference counted).
template<typename T>
struct ReferenceHandler
{ };
1459 
1460 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1461 
1464 template <>
1465 struct ReferenceHandler<cl_device_id>
1466 {
1476  static cl_int retain(cl_device_id device)
1477  { return ::clRetainDevice(device); }
1487  static cl_int release(cl_device_id device)
1488  { return ::clReleaseDevice(device); }
1489 };
1490 #else // CL_HPP_TARGET_OPENCL_VERSION >= 120
1491 
// Pre-OpenCL 1.2 variant: device handles are not reference counted, so
// both operations are no-ops that report success.
template <>
struct ReferenceHandler<cl_device_id>
{
    // cl_device_id does not have retain().
    static cl_int retain(cl_device_id)
    { return CL_SUCCESS; }
    // cl_device_id does not have release().
    static cl_int release(cl_device_id)
    { return CL_SUCCESS; }
};
1504 #endif // ! (CL_HPP_TARGET_OPENCL_VERSION >= 120)
1505 
1506 template <>
1507 struct ReferenceHandler<cl_platform_id>
1508 {
1509  // cl_platform_id does not have retain().
1510  static cl_int retain(cl_platform_id)
1511  { return CL_SUCCESS; }
1512  // cl_platform_id does not have release().
1513  static cl_int release(cl_platform_id)
1514  { return CL_SUCCESS; }
1515 };
1516 
1517 template <>
1518 struct ReferenceHandler<cl_context>
1519 {
1520  static cl_int retain(cl_context context)
1521  { return ::clRetainContext(context); }
1522  static cl_int release(cl_context context)
1523  { return ::clReleaseContext(context); }
1524 };
1525 
1526 template <>
1527 struct ReferenceHandler<cl_command_queue>
1528 {
1529  static cl_int retain(cl_command_queue queue)
1530  { return ::clRetainCommandQueue(queue); }
1531  static cl_int release(cl_command_queue queue)
1532  { return ::clReleaseCommandQueue(queue); }
1533 };
1534 
1535 template <>
1536 struct ReferenceHandler<cl_mem>
1537 {
1538  static cl_int retain(cl_mem memory)
1539  { return ::clRetainMemObject(memory); }
1540  static cl_int release(cl_mem memory)
1541  { return ::clReleaseMemObject(memory); }
1542 };
1543 
1544 template <>
1545 struct ReferenceHandler<cl_sampler>
1546 {
1547  static cl_int retain(cl_sampler sampler)
1548  { return ::clRetainSampler(sampler); }
1549  static cl_int release(cl_sampler sampler)
1550  { return ::clReleaseSampler(sampler); }
1551 };
1552 
1553 template <>
1554 struct ReferenceHandler<cl_program>
1555 {
1556  static cl_int retain(cl_program program)
1557  { return ::clRetainProgram(program); }
1558  static cl_int release(cl_program program)
1559  { return ::clReleaseProgram(program); }
1560 };
1561 
1562 template <>
1563 struct ReferenceHandler<cl_kernel>
1564 {
1565  static cl_int retain(cl_kernel kernel)
1566  { return ::clRetainKernel(kernel); }
1567  static cl_int release(cl_kernel kernel)
1568  { return ::clReleaseKernel(kernel); }
1569 };
1570 
1571 template <>
1572 struct ReferenceHandler<cl_event>
1573 {
1574  static cl_int retain(cl_event event)
1575  { return ::clRetainEvent(event); }
1576  static cl_int release(cl_event event)
1577  { return ::clReleaseEvent(event); }
1578 };
1579 
1580 
1581 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1582 // Extracts version number with major in the upper 16 bits, minor in the lower 16
1583 static cl_uint getVersion(const vector<char> &versionInfo)
1584 {
1585  int highVersion = 0;
1586  int lowVersion = 0;
1587  int index = 7;
1588  while(versionInfo[index] != '.' ) {
1589  highVersion *= 10;
1590  highVersion += versionInfo[index]-'0';
1591  ++index;
1592  }
1593  ++index;
1594  while(versionInfo[index] != ' ' && versionInfo[index] != '\0') {
1595  lowVersion *= 10;
1596  lowVersion += versionInfo[index]-'0';
1597  ++index;
1598  }
1599  return (highVersion << 16) | lowVersion;
1600 }
1601 
1602 static cl_uint getPlatformVersion(cl_platform_id platform)
1603 {
1604  size_type size = 0;
1605  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, 0, NULL, &size);
1606 
1607  vector<char> versionInfo(size);
1608  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, size, versionInfo.data(), &size);
1609  return getVersion(versionInfo);
1610 }
1611 
1612 static cl_uint getDevicePlatformVersion(cl_device_id device)
1613 {
1614  cl_platform_id platform;
1615  clGetDeviceInfo(device, CL_DEVICE_PLATFORM, sizeof(platform), &platform, NULL);
1616  return getPlatformVersion(platform);
1617 }
1618 
// Returns the packed platform version for the platform backing the first
// device attached to the given context; 0 when the context reports no
// devices.
static cl_uint getContextPlatformVersion(cl_context context)
{
    // The platform cannot be queried directly, so we first have to grab a
    // device and obtain its platform
    size_type size = 0;
    clGetContextInfo(context, CL_CONTEXT_DEVICES, 0, NULL, &size);
    if (size == 0)
        return 0;
    vector<cl_device_id> devices(size/sizeof(cl_device_id));
    clGetContextInfo(context, CL_CONTEXT_DEVICES, size, devices.data(), NULL);
    return getDevicePlatformVersion(devices[0]);
}
1631 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1632 
// RAII wrapper around a raw OpenCL handle of type T. Copies retain the
// handle, destruction and replacement release it, moves transfer it with
// no ref-count traffic; all counting goes through ReferenceHandler<T>.
template <typename T>
class Wrapper
{
public:
    typedef T cl_type;

protected:
    cl_type object_;  // underlying OpenCL handle; may be NULL

public:
    Wrapper() : object_(NULL) { }

    // Wraps obj. Retains only when retainObject is true, i.e. when the
    // caller is NOT transferring its own reference to this wrapper.
    Wrapper(const cl_type &obj, bool retainObject) : object_(obj)
    {
        if (retainObject) {
            detail::errHandler(retain(), __RETAIN_ERR);
        }
    }

    ~Wrapper()
    {
        if (object_ != NULL) { release(); }
    }

    Wrapper(const Wrapper<cl_type>& rhs)
    {
        object_ = rhs.object_;
        detail::errHandler(retain(), __RETAIN_ERR);
    }

    // Move construction steals the handle; no retain/release occurs.
    Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
    {
        object_ = rhs.object_;
        rhs.object_ = NULL;
    }

    Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
    {
        if (this != &rhs) {
            detail::errHandler(release(), __RELEASE_ERR);
            object_ = rhs.object_;
            detail::errHandler(retain(), __RETAIN_ERR);
        }
        return *this;
    }

    Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
    {
        if (this != &rhs) {
            detail::errHandler(release(), __RELEASE_ERR);
            object_ = rhs.object_;
            rhs.object_ = NULL;
        }
        return *this;
    }

    // Assignment from a raw handle adopts the caller's reference: the old
    // handle is released but the new one is deliberately NOT retained.
    Wrapper<cl_type>& operator = (const cl_type &rhs)
    {
        detail::errHandler(release(), __RELEASE_ERR);
        object_ = rhs;
        return *this;
    }

    const cl_type& operator ()() const { return object_; }

    cl_type& operator ()() { return object_; }

    const cl_type get() const { return object_; }

    cl_type get() { return object_; }


protected:
    template<typename Func, typename U>
    friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);

    // No-op on a NULL handle; otherwise bumps the runtime's ref count.
    cl_int retain() const
    {
        if (object_ != nullptr) {
            return ReferenceHandler<cl_type>::retain(object_);
        }
        else {
            return CL_SUCCESS;
        }
    }

    // No-op on a NULL handle; otherwise drops the runtime's ref count.
    cl_int release() const
    {
        if (object_ != nullptr) {
            return ReferenceHandler<cl_type>::release(object_);
        }
        else {
            return CL_SUCCESS;
        }
    }
};
1729 
// Specialization for cl_device_id. Device handles are only reference
// counted on OpenCL 1.2+ platforms (sub-devices), so every retain/release
// is additionally gated on the per-handle referenceCountable_ flag.
template <>
class Wrapper<cl_device_id>
{
public:
    typedef cl_device_id cl_type;

protected:
    cl_type object_;           // underlying device handle
    bool referenceCountable_;  // true when clRetain/ReleaseDevice may be used

    // True only when the bindings target OpenCL 1.2+ and, if older
    // platforms must also be supported at runtime, the device's platform
    // reports a version above 1.1 (packed value (1<<16)+1).
    static bool isReferenceCountable(cl_device_id device)
    {
        bool retVal = false;
#if CL_HPP_TARGET_OPENCL_VERSION >= 120
#if CL_HPP_MINIMUM_OPENCL_VERSION < 120
        if (device != NULL) {
            int version = getDevicePlatformVersion(device);
            if(version > ((1 << 16) + 1)) {
                retVal = true;
            }
        }
#else // CL_HPP_MINIMUM_OPENCL_VERSION < 120
        retVal = true;
#endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
        return retVal;
    }

public:
    Wrapper() : object_(NULL), referenceCountable_(false)
    {
    }

    // Wraps obj; retains only when retainObject is true (and the handle
    // turns out to be reference countable).
    Wrapper(const cl_type &obj, bool retainObject) :
        object_(obj),
        referenceCountable_(false)
    {
        referenceCountable_ = isReferenceCountable(obj);

        if (retainObject) {
            detail::errHandler(retain(), __RETAIN_ERR);
        }
    }

    ~Wrapper()
    {
        release();
    }

    Wrapper(const Wrapper<cl_type>& rhs)
    {
        object_ = rhs.object_;
        referenceCountable_ = isReferenceCountable(object_);
        detail::errHandler(retain(), __RETAIN_ERR);
    }

    // Move steals both the handle and its countability flag.
    Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
    {
        object_ = rhs.object_;
        referenceCountable_ = rhs.referenceCountable_;
        rhs.object_ = NULL;
        rhs.referenceCountable_ = false;
    }

    Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
    {
        if (this != &rhs) {
            detail::errHandler(release(), __RELEASE_ERR);
            object_ = rhs.object_;
            referenceCountable_ = rhs.referenceCountable_;
            detail::errHandler(retain(), __RETAIN_ERR);
        }
        return *this;
    }

    Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
    {
        if (this != &rhs) {
            detail::errHandler(release(), __RELEASE_ERR);
            object_ = rhs.object_;
            referenceCountable_ = rhs.referenceCountable_;
            rhs.object_ = NULL;
            rhs.referenceCountable_ = false;
        }
        return *this;
    }

    // Raw-handle assignment adopts the caller's reference (no retain).
    Wrapper<cl_type>& operator = (const cl_type &rhs)
    {
        detail::errHandler(release(), __RELEASE_ERR);
        object_ = rhs;
        referenceCountable_ = isReferenceCountable(object_);
        return *this;
    }

    const cl_type& operator ()() const { return object_; }

    cl_type& operator ()() { return object_; }

    const cl_type get() const { return object_; }

    cl_type get() { return object_; }

protected:
    template<typename Func, typename U>
    friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);

    template<typename Func, typename U>
    friend inline cl_int getInfoHelper(Func, cl_uint, vector<U>*, int, typename U::cl_type);

    // No-op unless the handle is non-NULL and reference countable.
    cl_int retain() const
    {
        if( object_ != nullptr && referenceCountable_ ) {
            return ReferenceHandler<cl_type>::retain(object_);
        }
        else {
            return CL_SUCCESS;
        }
    }

    // No-op unless the handle is non-NULL and reference countable.
    cl_int release() const
    {
        if (object_ != nullptr && referenceCountable_) {
            return ReferenceHandler<cl_type>::release(object_);
        }
        else {
            return CL_SUCCESS;
        }
    }
};
1860 
1861 template <typename T>
1862 inline bool operator==(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1863 {
1864  return lhs() == rhs();
1865 }
1866 
1867 template <typename T>
1868 inline bool operator!=(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1869 {
1870  return !operator==(lhs, rhs);
1871 }
1872 
1873 } // namespace detail
1875 
1876 
// One (device, build log) pair per device a program was built for.
using BuildLogType = vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, CL_PROGRAM_BUILD_LOG>::param_type>>;
1878 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1879 
// Exception thrown on program build failure, carrying the per-device
// build logs captured at throw time in addition to cl::Error's code/message.
class BuildError : public Error
{
private:
    BuildLogType buildLogs;  // copy of the logs supplied at construction
public:
    // err and errStr are forwarded to cl::Error; vec holds the build log
    // of every device the program was built for.
    BuildError(cl_int err, const char * errStr, const BuildLogType &vec) : Error(err, errStr), buildLogs(vec)
    {
    }

    // Returns a copy of the captured (device, log) pairs.
    BuildLogType getBuildLog() const
    {
        return buildLogs;
    }
};
1896 namespace detail {
1897  static inline cl_int buildErrHandler(
1898  cl_int err,
1899  const char * errStr,
1900  const BuildLogType &buildLogs)
1901  {
1902  if (err != CL_SUCCESS) {
1903  throw BuildError(err, errStr, buildLogs);
1904  }
1905  return err;
1906  }
1907 } // namespace detail
1908 
1909 #else
1910 namespace detail {
1911  static inline cl_int buildErrHandler(
1912  cl_int err,
1913  const char * errStr,
1914  const BuildLogType &buildLogs)
1915  {
1916  (void)buildLogs; // suppress unused variable warning
1917  (void)errStr;
1918  return err;
1919  }
1920 } // namespace detail
1921 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1922 
1923 
1929 struct ImageFormat : public cl_image_format
1930 {
1933 
1935  ImageFormat(cl_channel_order order, cl_channel_type type)
1936  {
1937  image_channel_order = order;
1938  image_channel_data_type = type;
1939  }
1940 
1943  {
1944  if (this != &rhs) {
1945  this->image_channel_data_type = rhs.image_channel_data_type;
1946  this->image_channel_order = rhs.image_channel_order;
1947  }
1948  return *this;
1949  }
1950 };
1951 
1959 class Device : public detail::Wrapper<cl_device_id>
1960 {
1961 private:
1962  static std::once_flag default_initialized_;
1963  static Device default_;
1964  static cl_int default_error_;
1965 
1971  static void makeDefault();
1972 
    // call_once target used by setDefault(): installs a caller-supplied
    // device as the process-wide default.
    static void makeDefaultProvided(const Device &p) {
        default_ = p;
    }
1981 
1982 public:
1983 #ifdef CL_HPP_UNIT_TEST_ENABLE
1984 
    // Test hook (CL_HPP_UNIT_TEST_ENABLE only): resets the cached default
    // device to an empty Device.
    static void unitTestClearDefault() {
        default_ = Device();
    }
1993 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
1994 
    // Default constructor: creates a Device wrapping a NULL cl_device_id.
    Device() : detail::Wrapper<cl_type>() { }
1997 
    // Wraps an existing cl_device_id. By default the caller's reference is
    // adopted (not retained); pass retainObject = true when the caller keeps
    // its own reference. Whether the count is actually bumped also depends
    // on the platform version (see Wrapper<cl_device_id>).
    explicit Device(const cl_device_id &device, bool retainObject = false) :
        detail::Wrapper<cl_type>(device, retainObject) { }
2004 
2010  cl_int *errResult = NULL)
2011  {
2012  std::call_once(default_initialized_, makeDefault);
2013  detail::errHandler(default_error_);
2014  if (errResult != NULL) {
2015  *errResult = default_error_;
2016  }
2017  return default_;
2018  }
2019 
    // Installs default_device as the process-wide default. The first caller
    // of setDefault/getDefault wins (std::call_once); returns the default
    // that is actually in effect afterwards, which may differ from the
    // argument if a default was already set.
    static Device setDefault(const Device &default_device)
    {
        std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_device));
        detail::errHandler(default_error_);
        return default_;
    }
2033 
2038  Device& operator = (const cl_device_id& rhs)
2039  {
2041  return *this;
2042  }
2043 
    // Copy constructor: shares dev's handle (base class manages retain).
    Device(const Device& dev) : detail::Wrapper<cl_type>(dev) {}
2048 
2053  {
2055  return *this;
2056  }
2057 
    // Move constructor: steals dev's handle without ref-count traffic.
    Device(Device&& dev) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(dev)) {}
2062 
2067  {
2068  detail::Wrapper<cl_type>::operator=(std::move(dev));
2069  return *this;
2070  }
2071 
    // Queries a device property via clGetDeviceInfo, storing the result in
    // *param. Returns CL_SUCCESS or the OpenCL error code.
    template <typename T>
    cl_int getInfo(cl_device_info name, T* param) const
    {
        return detail::errHandler(
            detail::getInfo(&::clGetDeviceInfo, object_, name, param),
            __GET_DEVICE_INFO_ERR);
    }
2080 
2082  template <cl_int name> typename
2084  getInfo(cl_int* err = NULL) const
2085  {
2086  typename detail::param_traits<
2087  detail::cl_device_info, name>::param_type param;
2088  cl_int result = getInfo(name, &param);
2089  if (err != NULL) {
2090  *err = result;
2091  }
2092  return param;
2093  }
2094 
2098 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2099  cl_int createSubDevices(
2101  const cl_device_partition_property * properties,
2102  vector<Device>* devices)
2103  {
2104  cl_uint n = 0;
2105  cl_int err = clCreateSubDevices(object_, properties, 0, NULL, &n);
2106  if (err != CL_SUCCESS) {
2107  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2108  }
2109 
2110  vector<cl_device_id> ids(n);
2111  err = clCreateSubDevices(object_, properties, n, ids.data(), NULL);
2112  if (err != CL_SUCCESS) {
2113  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2114  }
2115 
2116  // Cannot trivially assign because we need to capture intermediates
2117  // with safe construction
2118  if (devices) {
2119  devices->resize(ids.size());
2120 
2121  // Assign to param, constructing with retain behaviour
2122  // to correctly capture each underlying CL object
2123  for (size_type i = 0; i < ids.size(); i++) {
2124  // We do not need to retain because this device is being created
2125  // by the runtime
2126  (*devices)[i] = Device(ids[i], false);
2127  }
2128  }
2129 
2130  return CL_SUCCESS;
2131  }
2132 #elif defined(CL_HPP_USE_CL_DEVICE_FISSION)
2133 
    // Device-fission path for pre-1.2 platforms: partitions this device via
    // the clCreateSubDevicesEXT extension entry point, resolved at runtime.
    // On success *devices receives one Device per created sub-device.
    cl_int createSubDevices(
        const cl_device_partition_property_ext * properties,
        vector<Device>* devices)
    {
        typedef CL_API_ENTRY cl_int
            ( CL_API_CALL * PFN_clCreateSubDevicesEXT)(
                cl_device_id /*in_device*/,
                const cl_device_partition_property_ext * /* properties */,
                cl_uint /*num_entries*/,
                cl_device_id * /*out_devices*/,
                cl_uint * /*num_devices*/ ) CL_EXT_SUFFIX__VERSION_1_1;

        static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
        CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateSubDevicesEXT);

        // First call: discover how many sub-devices will be produced.
        cl_uint n = 0;
        cl_int err = pfn_clCreateSubDevicesEXT(object_, properties, 0, NULL, &n);
        if (err != CL_SUCCESS) {
            return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
        }

        // Second call: actually create the sub-devices.
        vector<cl_device_id> ids(n);
        err = pfn_clCreateSubDevicesEXT(object_, properties, n, ids.data(), NULL);
        if (err != CL_SUCCESS) {
            return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
        }
        // Cannot trivially assign because we need to capture intermediates
        // with safe construction
        if (devices) {
            devices->resize(ids.size());

            // Assign to param, constructing with retain behaviour
            // to correctly capture each underlying CL object
            for (size_type i = 0; i < ids.size(); i++) {
                // We do not need to retain because this device is being created
                // by the runtime
                (*devices)[i] = Device(ids[i], false);
            }
        }
        return CL_SUCCESS;
    }
2178 #endif // defined(CL_HPP_USE_CL_DEVICE_FISSION)
2179 };
2180 
// Out-of-line storage for Device's lazily-initialized default-device state.
CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Device::default_initialized_;
CL_HPP_DEFINE_STATIC_MEMBER_ Device Device::default_;
CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Device::default_error_ = CL_SUCCESS;
2184 
2192 class Platform : public detail::Wrapper<cl_platform_id>
2193 {
2194 private:
2195  static std::once_flag default_initialized_;
2196  static Platform default_;
2197  static cl_int default_error_;
2198 
    // call_once target: selects the first enumerated platform as the
    // default. Errors are recorded in default_error_ rather than thrown.
    static void makeDefault() {
        /* Throwing an exception from a call_once invocation does not do
         * what we wish, so we catch it and save the error.
         */
#if defined(CL_HPP_ENABLE_EXCEPTIONS)
        try
#endif
        {
            // If a default was not provided, generate one;
            // otherwise set it
            cl_uint n = 0;

            cl_int err = ::clGetPlatformIDs(0, NULL, &n);
            if (err != CL_SUCCESS) {
                default_error_ = err;
                return;
            }
            if (n == 0) {
                default_error_ = CL_INVALID_PLATFORM;
                return;
            }

            vector<cl_platform_id> ids(n);
            err = ::clGetPlatformIDs(n, ids.data(), NULL);
            if (err != CL_SUCCESS) {
                default_error_ = err;
                return;
            }

            // The first enumerated platform becomes the default.
            default_ = Platform(ids[0]);
        }
#if defined(CL_HPP_ENABLE_EXCEPTIONS)
        catch (cl::Error &e) {
            default_error_ = e.err();
        }
#endif
    }
2241 
    // call_once target used by setDefault(): installs a caller-supplied
    // platform as the process-wide default.
    static void makeDefaultProvided(const Platform &p) {
        default_ = p;
    }
2250 
2251 public:
2252 #ifdef CL_HPP_UNIT_TEST_ENABLE
2253 
    // Test hook (CL_HPP_UNIT_TEST_ENABLE only): resets the cached default
    // platform to an empty Platform.
    static void unitTestClearDefault() {
        default_ = Platform();
    }
2262 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2263 
    // Default constructor: creates a Platform wrapping a NULL cl_platform_id.
    Platform() : detail::Wrapper<cl_type>() { }
2266 
    // Wraps an existing cl_platform_id. retainObject is accepted for
    // interface symmetry; platforms are not reference counted, so the
    // underlying handler treats retain/release as no-ops.
    explicit Platform(const cl_platform_id &platform, bool retainObject = false) :
        detail::Wrapper<cl_type>(platform, retainObject) { }
2276 
2281  Platform& operator = (const cl_platform_id& rhs)
2282  {
2284  return *this;
2285  }
2286 
    // Returns the default platform, initializing it on first use
    // (thread-safe via std::call_once). If errResult is non-NULL it
    // receives the error code recorded during initialization.
    static Platform getDefault(
        cl_int *errResult = NULL)
    {
        std::call_once(default_initialized_, makeDefault);
        detail::errHandler(default_error_);
        if (errResult != NULL) {
            *errResult = default_error_;
        }
        return default_;
    }
2297 
2305  static Platform setDefault(const Platform &default_platform)
2306  {
2307  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_platform));
2308  detail::errHandler(default_error_);
2309  return default_;
2310  }
2311 
2313  cl_int getInfo(cl_platform_info name, string* param) const
2314  {
2315  return detail::errHandler(
2316  detail::getInfo(&::clGetPlatformInfo, object_, name, param),
2317  __GET_PLATFORM_INFO_ERR);
2318  }
2319 
2321  template <cl_int name> typename
2323  getInfo(cl_int* err = NULL) const
2324  {
2325  typename detail::param_traits<
2326  detail::cl_platform_info, name>::param_type param;
2327  cl_int result = getInfo(name, &param);
2328  if (err != NULL) {
2329  *err = result;
2330  }
2331  return param;
2332  }
2333 
2338  cl_int getDevices(
2339  cl_device_type type,
2340  vector<Device>* devices) const
2341  {
2342  cl_uint n = 0;
2343  if( devices == NULL ) {
2344  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2345  }
2346  cl_int err = ::clGetDeviceIDs(object_, type, 0, NULL, &n);
2347  if (err != CL_SUCCESS) {
2348  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2349  }
2350 
2351  vector<cl_device_id> ids(n);
2352  err = ::clGetDeviceIDs(object_, type, n, ids.data(), NULL);
2353  if (err != CL_SUCCESS) {
2354  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2355  }
2356 
2357  // Cannot trivially assign because we need to capture intermediates
2358  // with safe construction
2359  // We must retain things we obtain from the API to avoid releasing
2360  // API-owned objects.
2361  if (devices) {
2362  devices->resize(ids.size());
2363 
2364  // Assign to param, constructing with retain behaviour
2365  // to correctly capture each underlying CL object
2366  for (size_type i = 0; i < ids.size(); i++) {
2367  (*devices)[i] = Device(ids[i], true);
2368  }
2369  }
2370  return CL_SUCCESS;
2371  }
2372 
2373 #if defined(CL_HPP_USE_DX_INTEROP)
2374 
2397  cl_int getDevices(
2398  cl_d3d10_device_source_khr d3d_device_source,
2399  void * d3d_object,
2400  cl_d3d10_device_set_khr d3d_device_set,
2401  vector<Device>* devices) const
2402  {
2403  typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clGetDeviceIDsFromD3D10KHR)(
2404  cl_platform_id platform,
2405  cl_d3d10_device_source_khr d3d_device_source,
2406  void * d3d_object,
2407  cl_d3d10_device_set_khr d3d_device_set,
2408  cl_uint num_entries,
2409  cl_device_id * devices,
2410  cl_uint* num_devices);
2411 
2412  if( devices == NULL ) {
2413  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2414  }
2415 
2416  static PFN_clGetDeviceIDsFromD3D10KHR pfn_clGetDeviceIDsFromD3D10KHR = NULL;
2417  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(object_, clGetDeviceIDsFromD3D10KHR);
2418 
2419  cl_uint n = 0;
2420  cl_int err = pfn_clGetDeviceIDsFromD3D10KHR(
2421  object_,
2422  d3d_device_source,
2423  d3d_object,
2424  d3d_device_set,
2425  0,
2426  NULL,
2427  &n);
2428  if (err != CL_SUCCESS) {
2429  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2430  }
2431 
2432  vector<cl_device_id> ids(n);
2433  err = pfn_clGetDeviceIDsFromD3D10KHR(
2434  object_,
2435  d3d_device_source,
2436  d3d_object,
2437  d3d_device_set,
2438  n,
2439  ids.data(),
2440  NULL);
2441  if (err != CL_SUCCESS) {
2442  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2443  }
2444 
2445  // Cannot trivially assign because we need to capture intermediates
2446  // with safe construction
2447  // We must retain things we obtain from the API to avoid releasing
2448  // API-owned objects.
2449  if (devices) {
2450  devices->resize(ids.size());
2451 
2452  // Assign to param, constructing with retain behaviour
2453  // to correctly capture each underlying CL object
2454  for (size_type i = 0; i < ids.size(); i++) {
2455  (*devices)[i] = Device(ids[i], true);
2456  }
2457  }
2458  return CL_SUCCESS;
2459  }
2460 #endif
2461 
2466  static cl_int get(
2467  vector<Platform>* platforms)
2468  {
2469  cl_uint n = 0;
2470 
2471  if( platforms == NULL ) {
2472  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_PLATFORM_IDS_ERR);
2473  }
2474 
2475  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2476  if (err != CL_SUCCESS) {
2477  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2478  }
2479 
2480  vector<cl_platform_id> ids(n);
2481  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2482  if (err != CL_SUCCESS) {
2483  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2484  }
2485 
2486  if (platforms) {
2487  platforms->resize(ids.size());
2488 
2489  // Platforms don't reference count
2490  for (size_type i = 0; i < ids.size(); i++) {
2491  (*platforms)[i] = Platform(ids[i]);
2492  }
2493  }
2494  return CL_SUCCESS;
2495  }
2496 
2501  static cl_int get(
2502  Platform * platform)
2503  {
2504  cl_int err;
2505  Platform default_platform = Platform::getDefault(&err);
2506  if (platform) {
2507  *platform = default_platform;
2508  }
2509  return err;
2510  }
2511 
2520  static Platform get(
2521  cl_int * errResult = NULL)
2522  {
2523  cl_int err;
2524  Platform default_platform = Platform::getDefault(&err);
2525  if (errResult) {
2526  *errResult = err;
2527  }
2528  return default_platform;
2529  }
2530 
2531 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2532  cl_int
2535  {
2536  return ::clUnloadPlatformCompiler(object_);
2537  }
2538 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
2539 }; // class Platform
2540 
2541 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Platform::default_initialized_;
2542 CL_HPP_DEFINE_STATIC_MEMBER_ Platform Platform::default_;
2543 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Platform::default_error_ = CL_SUCCESS;
2544 
2545 
2549 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2550 
2554 inline CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int
2555 UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
2556 inline cl_int
2558 {
2559  return ::clUnloadCompiler();
2560 }
2561 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2562 
2571 class Context
2572  : public detail::Wrapper<cl_context>
2573 {
2574 private:
2575  static std::once_flag default_initialized_;
2576  static Context default_;
2577  static cl_int default_error_;
2578 
2584  static void makeDefault() {
2585  /* Throwing an exception from a call_once invocation does not do
2586  * what we wish, so we catch it and save the error.
2587  */
2588 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2589  try
2590 #endif
2591  {
2592 #if !defined(__APPLE__) && !defined(__MACOS)
2593  const Platform &p = Platform::getDefault();
2594  cl_platform_id defaultPlatform = p();
2595  cl_context_properties properties[3] = {
2596  CL_CONTEXT_PLATFORM, (cl_context_properties)defaultPlatform, 0
2597  };
2598 #else // #if !defined(__APPLE__) && !defined(__MACOS)
2599  cl_context_properties *properties = nullptr;
2600 #endif // #if !defined(__APPLE__) && !defined(__MACOS)
2601 
2602  default_ = Context(
2603  CL_DEVICE_TYPE_DEFAULT,
2604  properties,
2605  NULL,
2606  NULL,
2607  &default_error_);
2608  }
2609 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2610  catch (cl::Error &e) {
2611  default_error_ = e.err();
2612  }
2613 #endif
2614  }
2615 
2616 
2622  static void makeDefaultProvided(const Context &c) {
2623  default_ = c;
2624  }
2625 
2626 public:
2627 #ifdef CL_HPP_UNIT_TEST_ENABLE
2628 
2634  static void unitTestClearDefault() {
2635  default_ = Context();
2636  }
2637 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2638 
2644  const vector<Device>& devices,
2645  cl_context_properties* properties = NULL,
2646  void (CL_CALLBACK * notifyFptr)(
2647  const char *,
2648  const void *,
2649  size_type,
2650  void *) = NULL,
2651  void* data = NULL,
2652  cl_int* err = NULL)
2653  {
2654  cl_int error;
2655 
2656  size_type numDevices = devices.size();
2657  vector<cl_device_id> deviceIDs(numDevices);
2658 
2659  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
2660  deviceIDs[deviceIndex] = (devices[deviceIndex])();
2661  }
2662 
2663  object_ = ::clCreateContext(
2664  properties, (cl_uint) numDevices,
2665  deviceIDs.data(),
2666  notifyFptr, data, &error);
2667 
2668  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2669  if (err != NULL) {
2670  *err = error;
2671  }
2672  }
2673 
2674  Context(
2675  const Device& device,
2676  cl_context_properties* properties = NULL,
2677  void (CL_CALLBACK * notifyFptr)(
2678  const char *,
2679  const void *,
2680  size_type,
2681  void *) = NULL,
2682  void* data = NULL,
2683  cl_int* err = NULL)
2684  {
2685  cl_int error;
2686 
2687  cl_device_id deviceID = device();
2688 
2689  object_ = ::clCreateContext(
2690  properties, 1,
2691  &deviceID,
2692  notifyFptr, data, &error);
2693 
2694  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2695  if (err != NULL) {
2696  *err = error;
2697  }
2698  }
2699 
2705  cl_device_type type,
2706  cl_context_properties* properties = NULL,
2707  void (CL_CALLBACK * notifyFptr)(
2708  const char *,
2709  const void *,
2710  size_type,
2711  void *) = NULL,
2712  void* data = NULL,
2713  cl_int* err = NULL)
2714  {
2715  cl_int error;
2716 
2717 #if !defined(__APPLE__) && !defined(__MACOS)
2718  cl_context_properties prop[4] = {CL_CONTEXT_PLATFORM, 0, 0, 0 };
2719 
2720  if (properties == NULL) {
2721  // Get a valid platform ID as we cannot send in a blank one
2722  vector<Platform> platforms;
2723  error = Platform::get(&platforms);
2724  if (error != CL_SUCCESS) {
2725  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2726  if (err != NULL) {
2727  *err = error;
2728  }
2729  return;
2730  }
2731 
2732  // Check the platforms we found for a device of our specified type
2733  cl_context_properties platform_id = 0;
2734  for (unsigned int i = 0; i < platforms.size(); i++) {
2735 
2736  vector<Device> devices;
2737 
2738 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2739  try {
2740 #endif
2741 
2742  error = platforms[i].getDevices(type, &devices);
2743 
2744 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2745  } catch (Error) {}
2746  // Catch if exceptions are enabled as we don't want to exit if first platform has no devices of type
2747  // We do error checking next anyway, and can throw there if needed
2748 #endif
2749 
2750  // Only squash CL_SUCCESS and CL_DEVICE_NOT_FOUND
2751  if (error != CL_SUCCESS && error != CL_DEVICE_NOT_FOUND) {
2752  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2753  if (err != NULL) {
2754  *err = error;
2755  }
2756  }
2757 
2758  if (devices.size() > 0) {
2759  platform_id = (cl_context_properties)platforms[i]();
2760  break;
2761  }
2762  }
2763 
2764  if (platform_id == 0) {
2765  detail::errHandler(CL_DEVICE_NOT_FOUND, __CREATE_CONTEXT_FROM_TYPE_ERR);
2766  if (err != NULL) {
2767  *err = CL_DEVICE_NOT_FOUND;
2768  }
2769  return;
2770  }
2771 
2772  prop[1] = platform_id;
2773  properties = &prop[0];
2774  }
2775 #endif
2776  object_ = ::clCreateContextFromType(
2777  properties, type, notifyFptr, data, &error);
2778 
2779  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2780  if (err != NULL) {
2781  *err = error;
2782  }
2783  }
2784 
2788  Context(const Context& ctx) : detail::Wrapper<cl_type>(ctx) {}
2789 
2794  {
2796  return *this;
2797  }
2798 
2802  Context(Context&& ctx) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(ctx)) {}
2803 
2808  {
2809  detail::Wrapper<cl_type>::operator=(std::move(ctx));
2810  return *this;
2811  }
2812 
2813 
2818  static Context getDefault(cl_int * err = NULL)
2819  {
2820  std::call_once(default_initialized_, makeDefault);
2821  detail::errHandler(default_error_);
2822  if (err != NULL) {
2823  *err = default_error_;
2824  }
2825  return default_;
2826  }
2827 
2835  static Context setDefault(const Context &default_context)
2836  {
2837  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_context));
2838  detail::errHandler(default_error_);
2839  return default_;
2840  }
2841 
2843  Context() : detail::Wrapper<cl_type>() { }
2844 
2850  explicit Context(const cl_context& context, bool retainObject = false) :
2851  detail::Wrapper<cl_type>(context, retainObject) { }
2852 
2858  Context& operator = (const cl_context& rhs)
2859  {
2861  return *this;
2862  }
2863 
2865  template <typename T>
2866  cl_int getInfo(cl_context_info name, T* param) const
2867  {
2868  return detail::errHandler(
2869  detail::getInfo(&::clGetContextInfo, object_, name, param),
2870  __GET_CONTEXT_INFO_ERR);
2871  }
2872 
2874  template <cl_int name> typename
2876  getInfo(cl_int* err = NULL) const
2877  {
2878  typename detail::param_traits<
2879  detail::cl_context_info, name>::param_type param;
2880  cl_int result = getInfo(name, &param);
2881  if (err != NULL) {
2882  *err = result;
2883  }
2884  return param;
2885  }
2886 
2892  cl_mem_flags flags,
2893  cl_mem_object_type type,
2894  vector<ImageFormat>* formats) const
2895  {
2896  cl_uint numEntries;
2897 
2898  if (!formats) {
2899  return CL_SUCCESS;
2900  }
2901 
2902  cl_int err = ::clGetSupportedImageFormats(
2903  object_,
2904  flags,
2905  type,
2906  0,
2907  NULL,
2908  &numEntries);
2909  if (err != CL_SUCCESS) {
2910  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
2911  }
2912 
2913  if (numEntries > 0) {
2914  vector<ImageFormat> value(numEntries);
2915  err = ::clGetSupportedImageFormats(
2916  object_,
2917  flags,
2918  type,
2919  numEntries,
2920  (cl_image_format*)value.data(),
2921  NULL);
2922  if (err != CL_SUCCESS) {
2923  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
2924  }
2925 
2926  formats->assign(begin(value), end(value));
2927  }
2928  else {
2929  // If no values are being returned, ensure an empty vector comes back
2930  formats->clear();
2931  }
2932 
2933  return CL_SUCCESS;
2934  }
2935 };
2936 
2937 inline void Device::makeDefault()
2938 {
2939  /* Throwing an exception from a call_once invocation does not do
2940  * what we wish, so we catch it and save the error.
2941  */
2942 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2943  try
2944 #endif
2945  {
2946  cl_int error = 0;
2947 
2948  Context context = Context::getDefault(&error);
2949  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2950 
2951  if (error != CL_SUCCESS) {
2952  default_error_ = error;
2953  }
2954  else {
2955  default_ = context.getInfo<CL_CONTEXT_DEVICES>()[0];
2956  default_error_ = CL_SUCCESS;
2957  }
2958  }
2959 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2960  catch (cl::Error &e) {
2961  default_error_ = e.err();
2962  }
2963 #endif
2964 }
2965 
2966 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Context::default_initialized_;
2967 CL_HPP_DEFINE_STATIC_MEMBER_ Context Context::default_;
2968 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Context::default_error_ = CL_SUCCESS;
2969 
2978 class Event : public detail::Wrapper<cl_event>
2979 {
2980 public:
2982  Event() : detail::Wrapper<cl_type>() { }
2983 
2992  explicit Event(const cl_event& event, bool retainObject = false) :
2993  detail::Wrapper<cl_type>(event, retainObject) { }
2994 
3000  Event& operator = (const cl_event& rhs)
3001  {
3003  return *this;
3004  }
3005 
3007  template <typename T>
3008  cl_int getInfo(cl_event_info name, T* param) const
3009  {
3010  return detail::errHandler(
3011  detail::getInfo(&::clGetEventInfo, object_, name, param),
3012  __GET_EVENT_INFO_ERR);
3013  }
3014 
3016  template <cl_int name> typename
3018  getInfo(cl_int* err = NULL) const
3019  {
3020  typename detail::param_traits<
3021  detail::cl_event_info, name>::param_type param;
3022  cl_int result = getInfo(name, &param);
3023  if (err != NULL) {
3024  *err = result;
3025  }
3026  return param;
3027  }
3028 
3030  template <typename T>
3031  cl_int getProfilingInfo(cl_profiling_info name, T* param) const
3032  {
3033  return detail::errHandler(detail::getInfo(
3034  &::clGetEventProfilingInfo, object_, name, param),
3035  __GET_EVENT_PROFILE_INFO_ERR);
3036  }
3037 
3039  template <cl_int name> typename
3041  getProfilingInfo(cl_int* err = NULL) const
3042  {
3043  typename detail::param_traits<
3044  detail::cl_profiling_info, name>::param_type param;
3045  cl_int result = getProfilingInfo(name, &param);
3046  if (err != NULL) {
3047  *err = result;
3048  }
3049  return param;
3050  }
3051 
3056  cl_int wait() const
3057  {
3058  return detail::errHandler(
3059  ::clWaitForEvents(1, &object_),
3060  __WAIT_FOR_EVENTS_ERR);
3061  }
3062 
3063 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3064 
3068  cl_int setCallback(
3069  cl_int type,
3070  void (CL_CALLBACK * pfn_notify)(cl_event, cl_int, void *),
3071  void * user_data = NULL)
3072  {
3073  return detail::errHandler(
3074  ::clSetEventCallback(
3075  object_,
3076  type,
3077  pfn_notify,
3078  user_data),
3079  __SET_EVENT_CALLBACK_ERR);
3080  }
3081 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3082 
3087  static cl_int
3088  waitForEvents(const vector<Event>& events)
3089  {
3090  return detail::errHandler(
3091  ::clWaitForEvents(
3092  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3093  __WAIT_FOR_EVENTS_ERR);
3094  }
3095 };
3096 
3097 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3098 
3102 class UserEvent : public Event
3103 {
3104 public:
3110  const Context& context,
3111  cl_int * err = NULL)
3112  {
3113  cl_int error;
3114  object_ = ::clCreateUserEvent(
3115  context(),
3116  &error);
3117 
3118  detail::errHandler(error, __CREATE_USER_EVENT_ERR);
3119  if (err != NULL) {
3120  *err = error;
3121  }
3122  }
3123 
3125  UserEvent() : Event() { }
3126 
3131  cl_int setStatus(cl_int status)
3132  {
3133  return detail::errHandler(
3134  ::clSetUserEventStatus(object_,status),
3135  __SET_USER_EVENT_STATUS_ERR);
3136  }
3137 };
3138 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3139 
3144 inline static cl_int
3145 WaitForEvents(const vector<Event>& events)
3146 {
3147  return detail::errHandler(
3148  ::clWaitForEvents(
3149  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3150  __WAIT_FOR_EVENTS_ERR);
3151 }
3152 
3161 class Memory : public detail::Wrapper<cl_mem>
3162 {
3163 public:
3165  Memory() : detail::Wrapper<cl_type>() { }
3166 
3178  explicit Memory(const cl_mem& memory, bool retainObject) :
3179  detail::Wrapper<cl_type>(memory, retainObject) { }
3180 
3186  Memory& operator = (const cl_mem& rhs)
3187  {
3189  return *this;
3190  }
3191 
3195  Memory(const Memory& mem) : detail::Wrapper<cl_type>(mem) {}
3196 
3201  {
3203  return *this;
3204  }
3205 
3209  Memory(Memory&& mem) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(mem)) {}
3210 
3215  {
3216  detail::Wrapper<cl_type>::operator=(std::move(mem));
3217  return *this;
3218  }
3219 
3220 
3222  template <typename T>
3223  cl_int getInfo(cl_mem_info name, T* param) const
3224  {
3225  return detail::errHandler(
3226  detail::getInfo(&::clGetMemObjectInfo, object_, name, param),
3227  __GET_MEM_OBJECT_INFO_ERR);
3228  }
3229 
3231  template <cl_int name> typename
3233  getInfo(cl_int* err = NULL) const
3234  {
3235  typename detail::param_traits<
3236  detail::cl_mem_info, name>::param_type param;
3237  cl_int result = getInfo(name, &param);
3238  if (err != NULL) {
3239  *err = result;
3240  }
3241  return param;
3242  }
3243 
3244 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3245 
3259  void (CL_CALLBACK * pfn_notify)(cl_mem, void *),
3260  void * user_data = NULL)
3261  {
3262  return detail::errHandler(
3263  ::clSetMemObjectDestructorCallback(
3264  object_,
3265  pfn_notify,
3266  user_data),
3267  __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR);
3268  }
3269 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3270 
3271 };
3272 
3273 // Pre-declare copy functions
3274 class Buffer;
3275 template< typename IteratorType >
3276 cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3277 template< typename IteratorType >
3278 cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3279 template< typename IteratorType >
3280 cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3281 template< typename IteratorType >
3282 cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3283 
3284 
3285 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3286 namespace detail
3287 {
3289  {
3290  public:
3291  static cl_svm_mem_flags getSVMMemFlags()
3292  {
3293  return 0;
3294  }
3295  };
3296 } // namespace detail
3297 
3298 template<class Trait = detail::SVMTraitNull>
3300 {
3301 public:
3302  static cl_svm_mem_flags getSVMMemFlags()
3303  {
3304  return CL_MEM_READ_WRITE |
3305  Trait::getSVMMemFlags();
3306  }
3307 };
3308 
3309 template<class Trait = detail::SVMTraitNull>
3311 {
3312 public:
3313  static cl_svm_mem_flags getSVMMemFlags()
3314  {
3315  return CL_MEM_READ_ONLY |
3316  Trait::getSVMMemFlags();
3317  }
3318 };
3319 
3320 template<class Trait = detail::SVMTraitNull>
3322 {
3323 public:
3324  static cl_svm_mem_flags getSVMMemFlags()
3325  {
3326  return CL_MEM_WRITE_ONLY |
3327  Trait::getSVMMemFlags();
3328  }
3329 };
3330 
3331 template<class Trait = SVMTraitReadWrite<>>
3333 {
3334 public:
3335  static cl_svm_mem_flags getSVMMemFlags()
3336  {
3337  return Trait::getSVMMemFlags();
3338  }
3339 };
3340 
3341 template<class Trait = SVMTraitReadWrite<>>
3343 {
3344 public:
3345  static cl_svm_mem_flags getSVMMemFlags()
3346  {
3347  return CL_MEM_SVM_FINE_GRAIN_BUFFER |
3348  Trait::getSVMMemFlags();
3349  }
3350 };
3351 
3352 template<class Trait = SVMTraitReadWrite<>>
3354 {
3355 public:
3356  static cl_svm_mem_flags getSVMMemFlags()
3357  {
3358  return
3359  CL_MEM_SVM_FINE_GRAIN_BUFFER |
3360  CL_MEM_SVM_ATOMICS |
3361  Trait::getSVMMemFlags();
3362  }
3363 };
3364 
3365 // Pre-declare SVM map function
3366 template<typename T>
3367 inline cl_int enqueueMapSVM(
3368  T* ptr,
3369  cl_bool blocking,
3370  cl_map_flags flags,
3371  size_type size,
3372  const vector<Event>* events = NULL,
3373  Event* event = NULL);
3374 
3386 template<typename T, class SVMTrait>
3388 private:
3389  Context context_;
3390 
3391 public:
3392  typedef T value_type;
3393  typedef value_type* pointer;
3394  typedef const value_type* const_pointer;
3395  typedef value_type& reference;
3396  typedef const value_type& const_reference;
3397  typedef std::size_t size_type;
3398  typedef std::ptrdiff_t difference_type;
3399 
3400  template<typename U>
3401  struct rebind
3402  {
3404  };
3405 
3406  template<typename U, typename V>
3407  friend class SVMAllocator;
3408 
3409  SVMAllocator() :
3410  context_(Context::getDefault())
3411  {
3412  }
3413 
3414  explicit SVMAllocator(cl::Context context) :
3415  context_(context)
3416  {
3417  }
3418 
3419 
3420  SVMAllocator(const SVMAllocator &other) :
3421  context_(other.context_)
3422  {
3423  }
3424 
3425  template<typename U>
3426  SVMAllocator(const SVMAllocator<U, SVMTrait> &other) :
3427  context_(other.context_)
3428  {
3429  }
3430 
3431  ~SVMAllocator()
3432  {
3433  }
3434 
3435  pointer address(reference r) CL_HPP_NOEXCEPT_
3436  {
3437  return std::addressof(r);
3438  }
3439 
3440  const_pointer address(const_reference r) CL_HPP_NOEXCEPT_
3441  {
3442  return std::addressof(r);
3443  }
3444 
3451  pointer allocate(
3452  size_type size,
3454  {
3455  // Allocate memory with default alignment matching the size of the type
3456  void* voidPointer =
3457  clSVMAlloc(
3458  context_(),
3459  SVMTrait::getSVMMemFlags(),
3460  size*sizeof(T),
3461  0);
3462  pointer retValue = reinterpret_cast<pointer>(
3463  voidPointer);
3464 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3465  if (!retValue) {
3466  std::bad_alloc excep;
3467  throw excep;
3468  }
3469 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3470 
3471  // If allocation was coarse-grained then map it
3472  if (!(SVMTrait::getSVMMemFlags() & CL_MEM_SVM_FINE_GRAIN_BUFFER)) {
3473  cl_int err = enqueueMapSVM(retValue, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE, size*sizeof(T));
3474  if (err != CL_SUCCESS) {
3475  std::bad_alloc excep;
3476  throw excep;
3477  }
3478  }
3479 
3480  // If exceptions disabled, return null pointer from allocator
3481  return retValue;
3482  }
3483 
3484  void deallocate(pointer p, size_type)
3485  {
3486  clSVMFree(context_(), p);
3487  }
3488 
3493  size_type max_size() const CL_HPP_NOEXCEPT_
3494  {
3495  size_type maxSize = std::numeric_limits<size_type>::max() / sizeof(T);
3496 
3497  for (Device &d : context_.getInfo<CL_CONTEXT_DEVICES>()) {
3498  maxSize = std::min(
3499  maxSize,
3500  static_cast<size_type>(d.getInfo<CL_DEVICE_MAX_MEM_ALLOC_SIZE>()));
3501  }
3502 
3503  return maxSize;
3504  }
3505 
3506  template< class U, class... Args >
3507  void construct(U* p, Args&&... args)
3508  {
3509  new(p)T(args...);
3510  }
3511 
3512  template< class U >
3513  void destroy(U* p)
3514  {
3515  p->~U();
3516  }
3517 
3521  inline bool operator==(SVMAllocator const& rhs)
3522  {
3523  return (context_==rhs.context_);
3524  }
3525 
3526  inline bool operator!=(SVMAllocator const& a)
3527  {
3528  return !operator==(a);
3529  }
3530 }; // class SVMAllocator return cl::pointer<T>(tmp, detail::Deleter<T, Alloc>{alloc, copies});
3531 
3532 
3533 template<class SVMTrait>
3534 class SVMAllocator<void, SVMTrait> {
3535 public:
3536  typedef void value_type;
3537  typedef value_type* pointer;
3538  typedef const value_type* const_pointer;
3539 
3540  template<typename U>
3541  struct rebind
3542  {
3544  };
3545 
3546  template<typename U, typename V>
3547  friend class SVMAllocator;
3548 };
3549 
3550 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3551 namespace detail
3552 {
3553  template<class Alloc>
3554  class Deleter {
3555  private:
3556  Alloc alloc_;
3557  size_type copies_;
3558 
3559  public:
3560  typedef typename std::allocator_traits<Alloc>::pointer pointer;
3561 
3562  Deleter(const Alloc &alloc, size_type copies) : alloc_{ alloc }, copies_{ copies }
3563  {
3564  }
3565 
3566  void operator()(pointer ptr) const {
3567  Alloc tmpAlloc{ alloc_ };
3568  std::allocator_traits<Alloc>::destroy(tmpAlloc, std::addressof(*ptr));
3569  std::allocator_traits<Alloc>::deallocate(tmpAlloc, ptr, copies_);
3570  }
3571  };
3572 } // namespace detail
3573 
3580 template <class T, class Alloc, class... Args>
3581 cl::pointer<T, detail::Deleter<Alloc>> allocate_pointer(const Alloc &alloc_, Args&&... args)
3582 {
3583  Alloc alloc(alloc_);
3584  static const size_type copies = 1;
3585 
3586  // Ensure that creation of the management block and the
3587  // object are dealt with separately such that we only provide a deleter
3588 
3589  T* tmp = std::allocator_traits<Alloc>::allocate(alloc, copies);
3590  if (!tmp) {
3591  std::bad_alloc excep;
3592  throw excep;
3593  }
3594  try {
3595  std::allocator_traits<Alloc>::construct(
3596  alloc,
3597  std::addressof(*tmp),
3598  std::forward<Args>(args)...);
3599 
3600  return cl::pointer<T, detail::Deleter<Alloc>>(tmp, detail::Deleter<Alloc>{alloc, copies});
3601  }
3602  catch (std::bad_alloc b)
3603  {
3604  std::allocator_traits<Alloc>::deallocate(alloc, tmp, copies);
3605  throw;
3606  }
3607 }
3608 
3609 template< class T, class SVMTrait, class... Args >
3610 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(Args... args)
3611 {
3612  SVMAllocator<T, SVMTrait> alloc;
3613  return cl::allocate_pointer<T>(alloc, args...);
3614 }
3615 
3616 template< class T, class SVMTrait, class... Args >
3617 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(const cl::Context &c, Args... args)
3618 {
3619  SVMAllocator<T, SVMTrait> alloc(c);
3620  return cl::allocate_pointer<T>(alloc, args...);
3621 }
3622 #endif // #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3623 
3627 template < class T >
3628 using coarse_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>>;
3629 
3633 template < class T >
3634 using fine_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitFine<>>>;
3635 
3639 template < class T >
3640 using atomic_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitAtomic<>>>;
3641 
3642 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3643 
3644 
3651 class Buffer : public Memory
3652 {
3653 public:
3654 
3663  const Context& context,
3664  cl_mem_flags flags,
3665  size_type size,
3666  void* host_ptr = NULL,
3667  cl_int* err = NULL)
3668  {
3669  cl_int error;
3670  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3671 
3672  detail::errHandler(error, __CREATE_BUFFER_ERR);
3673  if (err != NULL) {
3674  *err = error;
3675  }
3676  }
3677 
3688  cl_mem_flags flags,
3689  size_type size,
3690  void* host_ptr = NULL,
3691  cl_int* err = NULL)
3692  {
3693  cl_int error;
3694 
3695  Context context = Context::getDefault(err);
3696 
3697  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3698 
3699  detail::errHandler(error, __CREATE_BUFFER_ERR);
3700  if (err != NULL) {
3701  *err = error;
3702  }
3703  }
3704 
3710  template< typename IteratorType >
3712  IteratorType startIterator,
3713  IteratorType endIterator,
3714  bool readOnly,
3715  bool useHostPtr = false,
3716  cl_int* err = NULL)
3717  {
3718  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
3719  cl_int error;
3720 
3721  cl_mem_flags flags = 0;
3722  if( readOnly ) {
3723  flags |= CL_MEM_READ_ONLY;
3724  }
3725  else {
3726  flags |= CL_MEM_READ_WRITE;
3727  }
3728  if( useHostPtr ) {
3729  flags |= CL_MEM_USE_HOST_PTR;
3730  }
3731 
3732  size_type size = sizeof(DataType)*(endIterator - startIterator);
3733 
3734  Context context = Context::getDefault(err);
3735 
3736  if( useHostPtr ) {
3737  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
3738  } else {
3739  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
3740  }
3741 
3742  detail::errHandler(error, __CREATE_BUFFER_ERR);
3743  if (err != NULL) {
3744  *err = error;
3745  }
3746 
3747  if( !useHostPtr ) {
3748  error = cl::copy(startIterator, endIterator, *this);
3749  detail::errHandler(error, __CREATE_BUFFER_ERR);
3750  if (err != NULL) {
3751  *err = error;
3752  }
3753  }
3754  }
3755 
3761  template< typename IteratorType >
3762  Buffer(const Context &context, IteratorType startIterator, IteratorType endIterator,
3763  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3764 
3769  template< typename IteratorType >
3770  Buffer(const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator,
3771  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3772 
3774  Buffer() : Memory() { }
3775 
3783  explicit Buffer(const cl_mem& buffer, bool retainObject = false) :
3784  Memory(buffer, retainObject) { }
3785 
3790  Buffer& operator = (const cl_mem& rhs)
3791  {
3792  Memory::operator=(rhs);
3793  return *this;
3794  }
3795 
3799  Buffer(const Buffer& buf) : Memory(buf) {}
3800 
3805  {
3806  Memory::operator=(buf);
3807  return *this;
3808  }
3809 
3813  Buffer(Buffer&& buf) CL_HPP_NOEXCEPT_ : Memory(std::move(buf)) {}
3814 
3819  {
3820  Memory::operator=(std::move(buf));
3821  return *this;
3822  }
3823 
3824 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3825 
3830  cl_mem_flags flags,
3831  cl_buffer_create_type buffer_create_type,
3832  const void * buffer_create_info,
3833  cl_int * err = NULL)
3834  {
3835  Buffer result;
3836  cl_int error;
3837  result.object_ = ::clCreateSubBuffer(
3838  object_,
3839  flags,
3840  buffer_create_type,
3841  buffer_create_info,
3842  &error);
3843 
3844  detail::errHandler(error, __CREATE_SUBBUFFER_ERR);
3845  if (err != NULL) {
3846  *err = error;
3847  }
3848 
3849  return result;
3850  }
3851 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3852 };
3853 
3854 #if defined (CL_HPP_USE_DX_INTEROP)
3855 
3863 class BufferD3D10 : public Buffer
3864 {
3865 public:
3866 
3867 
3873  BufferD3D10(
3874  const Context& context,
3875  cl_mem_flags flags,
3876  ID3D10Buffer* bufobj,
3877  cl_int * err = NULL) : pfn_clCreateFromD3D10BufferKHR(nullptr)
3878  {
3879  typedef CL_API_ENTRY cl_mem (CL_API_CALL *PFN_clCreateFromD3D10BufferKHR)(
3880  cl_context context, cl_mem_flags flags, ID3D10Buffer* buffer,
3881  cl_int* errcode_ret);
3882  PFN_clCreateFromD3D10BufferKHR pfn_clCreateFromD3D10BufferKHR;
3883 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
3884  vector<cl_context_properties> props = context.getInfo<CL_CONTEXT_PROPERTIES>();
3885  cl_platform platform = -1;
3886  for( int i = 0; i < props.size(); ++i ) {
3887  if( props[i] == CL_CONTEXT_PLATFORM ) {
3888  platform = props[i+1];
3889  }
3890  }
3891  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateFromD3D10BufferKHR);
3892 #elif CL_HPP_TARGET_OPENCL_VERSION >= 110
3893  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateFromD3D10BufferKHR);
3894 #endif
3895 
3896  cl_int error;
3897  object_ = pfn_clCreateFromD3D10BufferKHR(
3898  context(),
3899  flags,
3900  bufobj,
3901  &error);
3902 
3903  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
3904  if (err != NULL) {
3905  *err = error;
3906  }
3907  }
3908 
3910  BufferD3D10() : Buffer() { }
3911 
3919  explicit BufferD3D10(const cl_mem& buffer, bool retainObject = false) :
3920  Buffer(buffer, retainObject) { }
3921 
3926  BufferD3D10& operator = (const cl_mem& rhs)
3927  {
3928  Buffer::operator=(rhs);
3929  return *this;
3930  }
3931 
3935  BufferD3D10(const BufferD3D10& buf) :
3936  Buffer(buf) {}
3937 
3941  BufferD3D10& operator = (const BufferD3D10 &buf)
3942  {
3943  Buffer::operator=(buf);
3944  return *this;
3945  }
3946 
3950  BufferD3D10(BufferD3D10&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
3951 
3955  BufferD3D10& operator = (BufferD3D10 &&buf)
3956  {
3957  Buffer::operator=(std::move(buf));
3958  return *this;
3959  }
3960 };
3961 #endif
3962 
3971 class BufferGL : public Buffer
3972 {
3973 public:
3980  const Context& context,
3981  cl_mem_flags flags,
3982  cl_GLuint bufobj,
3983  cl_int * err = NULL)
3984  {
3985  cl_int error;
3986  object_ = ::clCreateFromGLBuffer(
3987  context(),
3988  flags,
3989  bufobj,
3990  &error);
3991 
3992  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
3993  if (err != NULL) {
3994  *err = error;
3995  }
3996  }
3997 
3999  BufferGL() : Buffer() { }
4000 
4008  explicit BufferGL(const cl_mem& buffer, bool retainObject = false) :
4009  Buffer(buffer, retainObject) { }
4010 
4015  BufferGL& operator = (const cl_mem& rhs)
4016  {
4017  Buffer::operator=(rhs);
4018  return *this;
4019  }
4020 
4024  BufferGL(const BufferGL& buf) : Buffer(buf) {}
4025 
4030  {
4031  Buffer::operator=(buf);
4032  return *this;
4033  }
4034 
4038  BufferGL(BufferGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4039 
4044  {
4045  Buffer::operator=(std::move(buf));
4046  return *this;
4047  }
4048 
4051  cl_gl_object_type *type,
4052  cl_GLuint * gl_object_name)
4053  {
4054  return detail::errHandler(
4055  ::clGetGLObjectInfo(object_,type,gl_object_name),
4056  __GET_GL_OBJECT_INFO_ERR);
4057  }
4058 };
4059 
4068 class BufferRenderGL : public Buffer
4069 {
4070 public:
4077  const Context& context,
4078  cl_mem_flags flags,
4079  cl_GLuint bufobj,
4080  cl_int * err = NULL)
4081  {
4082  cl_int error;
4083  object_ = ::clCreateFromGLRenderbuffer(
4084  context(),
4085  flags,
4086  bufobj,
4087  &error);
4088 
4089  detail::errHandler(error, __CREATE_GL_RENDER_BUFFER_ERR);
4090  if (err != NULL) {
4091  *err = error;
4092  }
4093  }
4094 
4097 
4105  explicit BufferRenderGL(const cl_mem& buffer, bool retainObject = false) :
4106  Buffer(buffer, retainObject) { }
4107 
4112  BufferRenderGL& operator = (const cl_mem& rhs)
4113  {
4114  Buffer::operator=(rhs);
4115  return *this;
4116  }
4117 
4121  BufferRenderGL(const BufferRenderGL& buf) : Buffer(buf) {}
4122 
4127  {
4128  Buffer::operator=(buf);
4129  return *this;
4130  }
4131 
4135  BufferRenderGL(BufferRenderGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4136 
4141  {
4142  Buffer::operator=(std::move(buf));
4143  return *this;
4144  }
4145 
4148  cl_gl_object_type *type,
4149  cl_GLuint * gl_object_name)
4150  {
4151  return detail::errHandler(
4152  ::clGetGLObjectInfo(object_,type,gl_object_name),
4153  __GET_GL_OBJECT_INFO_ERR);
4154  }
4155 };
4156 
4163 class Image : public Memory
4164 {
4165 protected:
4167  Image() : Memory() { }
4168 
4176  explicit Image(const cl_mem& image, bool retainObject = false) :
4177  Memory(image, retainObject) { }
4178 
4183  Image& operator = (const cl_mem& rhs)
4184  {
4185  Memory::operator=(rhs);
4186  return *this;
4187  }
4188 
4192  Image(const Image& img) : Memory(img) {}
4193 
4197  Image& operator = (const Image &img)
4198  {
4199  Memory::operator=(img);
4200  return *this;
4201  }
4202 
4206  Image(Image&& img) CL_HPP_NOEXCEPT_ : Memory(std::move(img)) {}
4207 
4212  {
4213  Memory::operator=(std::move(img));
4214  return *this;
4215  }
4216 
4217 
4218 public:
4220  template <typename T>
4221  cl_int getImageInfo(cl_image_info name, T* param) const
4222  {
4223  return detail::errHandler(
4224  detail::getInfo(&::clGetImageInfo, object_, name, param),
4225  __GET_IMAGE_INFO_ERR);
4226  }
4227 
4229  template <cl_int name> typename
4231  getImageInfo(cl_int* err = NULL) const
4232  {
4233  typename detail::param_traits<
4234  detail::cl_image_info, name>::param_type param;
4235  cl_int result = getImageInfo(name, &param);
4236  if (err != NULL) {
4237  *err = result;
4238  }
4239  return param;
4240  }
4241 };
4242 
4243 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4244 
4250 class Image1D : public Image
4251 {
4252 public:
4258  const Context& context,
4259  cl_mem_flags flags,
4260  ImageFormat format,
4261  size_type width,
4262  void* host_ptr = NULL,
4263  cl_int* err = NULL)
4264  {
4265  cl_int error;
4266  cl_image_desc desc =
4267  {
4268  CL_MEM_OBJECT_IMAGE1D,
4269  width,
4270  0, 0, 0, 0, 0, 0, 0, 0
4271  };
4272  object_ = ::clCreateImage(
4273  context(),
4274  flags,
4275  &format,
4276  &desc,
4277  host_ptr,
4278  &error);
4279 
4280  detail::errHandler(error, __CREATE_IMAGE_ERR);
4281  if (err != NULL) {
4282  *err = error;
4283  }
4284  }
4285 
4287  Image1D() { }
4288 
4296  explicit Image1D(const cl_mem& image1D, bool retainObject = false) :
4297  Image(image1D, retainObject) { }
4298 
4303  Image1D& operator = (const cl_mem& rhs)
4304  {
4305  Image::operator=(rhs);
4306  return *this;
4307  }
4308 
4312  Image1D(const Image1D& img) : Image(img) {}
4313 
4318  {
4319  Image::operator=(img);
4320  return *this;
4321  }
4322 
4326  Image1D(Image1D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4327 
4332  {
4333  Image::operator=(std::move(img));
4334  return *this;
4335  }
4336 
4337 };
4338 
4342 class Image1DBuffer : public Image
4343 {
4344 public:
4345  Image1DBuffer(
4346  const Context& context,
4347  cl_mem_flags flags,
4348  ImageFormat format,
4349  size_type width,
4350  const Buffer &buffer,
4351  cl_int* err = NULL)
4352  {
4353  cl_int error;
4354  cl_image_desc desc =
4355  {
4356  CL_MEM_OBJECT_IMAGE1D_BUFFER,
4357  width,
4358  0, 0, 0, 0, 0, 0, 0,
4359  buffer()
4360  };
4361  object_ = ::clCreateImage(
4362  context(),
4363  flags,
4364  &format,
4365  &desc,
4366  NULL,
4367  &error);
4368 
4369  detail::errHandler(error, __CREATE_IMAGE_ERR);
4370  if (err != NULL) {
4371  *err = error;
4372  }
4373  }
4374 
4375  Image1DBuffer() { }
4376 
4384  explicit Image1DBuffer(const cl_mem& image1D, bool retainObject = false) :
4385  Image(image1D, retainObject) { }
4386 
4387  Image1DBuffer& operator = (const cl_mem& rhs)
4388  {
4389  Image::operator=(rhs);
4390  return *this;
4391  }
4392 
4396  Image1DBuffer(const Image1DBuffer& img) : Image(img) {}
4397 
4402  {
4403  Image::operator=(img);
4404  return *this;
4405  }
4406 
4410  Image1DBuffer(Image1DBuffer&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4411 
4416  {
4417  Image::operator=(std::move(img));
4418  return *this;
4419  }
4420 
4421 };
4422 
4426 class Image1DArray : public Image
4427 {
4428 public:
4429  Image1DArray(
4430  const Context& context,
4431  cl_mem_flags flags,
4432  ImageFormat format,
4433  size_type arraySize,
4434  size_type width,
4435  size_type rowPitch,
4436  void* host_ptr = NULL,
4437  cl_int* err = NULL)
4438  {
4439  cl_int error;
4440  cl_image_desc desc =
4441  {
4442  CL_MEM_OBJECT_IMAGE1D_ARRAY,
4443  width,
4444  0, 0, // height, depth (unused)
4445  arraySize,
4446  rowPitch,
4447  0, 0, 0, 0
4448  };
4449  object_ = ::clCreateImage(
4450  context(),
4451  flags,
4452  &format,
4453  &desc,
4454  host_ptr,
4455  &error);
4456 
4457  detail::errHandler(error, __CREATE_IMAGE_ERR);
4458  if (err != NULL) {
4459  *err = error;
4460  }
4461  }
4462 
4463  Image1DArray() { }
4464 
4472  explicit Image1DArray(const cl_mem& imageArray, bool retainObject = false) :
4473  Image(imageArray, retainObject) { }
4474 
4475 
4476  Image1DArray& operator = (const cl_mem& rhs)
4477  {
4478  Image::operator=(rhs);
4479  return *this;
4480  }
4481 
4485  Image1DArray(const Image1DArray& img) : Image(img) {}
4486 
4491  {
4492  Image::operator=(img);
4493  return *this;
4494  }
4495 
4499  Image1DArray(Image1DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4500 
4505  {
4506  Image::operator=(std::move(img));
4507  return *this;
4508  }
4509 
4510 };
4511 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4512 
4513 
4520 class Image2D : public Image
4521 {
4522 public:
4528  const Context& context,
4529  cl_mem_flags flags,
4530  ImageFormat format,
4531  size_type width,
4532  size_type height,
4533  size_type row_pitch = 0,
4534  void* host_ptr = NULL,
4535  cl_int* err = NULL)
4536  {
4537  cl_int error;
4538  bool useCreateImage;
4539 
4540 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4541  // Run-time decision based on the actual platform
4542  {
4543  cl_uint version = detail::getContextPlatformVersion(context());
4544  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4545  }
4546 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4547  useCreateImage = true;
4548 #else
4549  useCreateImage = false;
4550 #endif
4551 
4552 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4553  if (useCreateImage)
4554  {
4555  cl_image_desc desc =
4556  {
4557  CL_MEM_OBJECT_IMAGE2D,
4558  width,
4559  height,
4560  0, 0, // depth, array size (unused)
4561  row_pitch,
4562  0, 0, 0, 0
4563  };
4564  object_ = ::clCreateImage(
4565  context(),
4566  flags,
4567  &format,
4568  &desc,
4569  host_ptr,
4570  &error);
4571 
4572  detail::errHandler(error, __CREATE_IMAGE_ERR);
4573  if (err != NULL) {
4574  *err = error;
4575  }
4576  }
4577 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
4578 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
4579  if (!useCreateImage)
4580  {
4581  object_ = ::clCreateImage2D(
4582  context(), flags,&format, width, height, row_pitch, host_ptr, &error);
4583 
4584  detail::errHandler(error, __CREATE_IMAGE2D_ERR);
4585  if (err != NULL) {
4586  *err = error;
4587  }
4588  }
4589 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
4590  }
4591 
4592 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
4593 
4599  const Context& context,
4600  ImageFormat format,
4601  const Buffer &sourceBuffer,
4602  size_type width,
4603  size_type height,
4604  size_type row_pitch = 0,
4605  cl_int* err = nullptr)
4606  {
4607  cl_int error;
4608 
4609  cl_image_desc desc =
4610  {
4611  CL_MEM_OBJECT_IMAGE2D,
4612  width,
4613  height,
4614  0, 0, // depth, array size (unused)
4615  row_pitch,
4616  0, 0, 0,
4617  // Use buffer as input to image
4618  sourceBuffer()
4619  };
4620  object_ = ::clCreateImage(
4621  context(),
4622  0, // flags inherited from buffer
4623  &format,
4624  &desc,
4625  nullptr,
4626  &error);
4627 
4628  detail::errHandler(error, __CREATE_IMAGE_ERR);
4629  if (err != nullptr) {
4630  *err = error;
4631  }
4632  }
4633 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
4634 
4635 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
4636 
4649  const Context& context,
4650  cl_channel_order order,
4651  const Image &sourceImage,
4652  cl_int* err = nullptr)
4653  {
4654  cl_int error;
4655 
4656  // Descriptor fields have to match source image
4657  size_type sourceWidth =
4658  sourceImage.getImageInfo<CL_IMAGE_WIDTH>();
4659  size_type sourceHeight =
4660  sourceImage.getImageInfo<CL_IMAGE_HEIGHT>();
4661  size_type sourceRowPitch =
4662  sourceImage.getImageInfo<CL_IMAGE_ROW_PITCH>();
4663  cl_uint sourceNumMIPLevels =
4664  sourceImage.getImageInfo<CL_IMAGE_NUM_MIP_LEVELS>();
4665  cl_uint sourceNumSamples =
4666  sourceImage.getImageInfo<CL_IMAGE_NUM_SAMPLES>();
4667  cl_image_format sourceFormat =
4668  sourceImage.getImageInfo<CL_IMAGE_FORMAT>();
4669 
4670  // Update only the channel order.
4671  // Channel format inherited from source.
4672  sourceFormat.image_channel_order = order;
4673  cl_image_desc desc =
4674  {
4675  CL_MEM_OBJECT_IMAGE2D,
4676  sourceWidth,
4677  sourceHeight,
4678  0, 0, // depth (unused), array size (unused)
4679  sourceRowPitch,
4680  0, // slice pitch (unused)
4681  sourceNumMIPLevels,
4682  sourceNumSamples,
4683  // Use buffer as input to image
4684  sourceImage()
4685  };
4686  object_ = ::clCreateImage(
4687  context(),
4688  0, // flags should be inherited from mem_object
4689  &sourceFormat,
4690  &desc,
4691  nullptr,
4692  &error);
4693 
4694  detail::errHandler(error, __CREATE_IMAGE_ERR);
4695  if (err != nullptr) {
4696  *err = error;
4697  }
4698  }
4699 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
4700 
4702  Image2D() { }
4703 
4711  explicit Image2D(const cl_mem& image2D, bool retainObject = false) :
4712  Image(image2D, retainObject) { }
4713 
4718  Image2D& operator = (const cl_mem& rhs)
4719  {
4720  Image::operator=(rhs);
4721  return *this;
4722  }
4723 
4727  Image2D(const Image2D& img) : Image(img) {}
4728 
4733  {
4734  Image::operator=(img);
4735  return *this;
4736  }
4737 
4741  Image2D(Image2D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4742 
4747  {
4748  Image::operator=(std::move(img));
4749  return *this;
4750  }
4751 
4752 };
4753 
4754 
4755 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
4756 
4765 class CL_EXT_PREFIX__VERSION_1_1_DEPRECATED Image2DGL : public Image2D
4766 {
4767 public:
4774  const Context& context,
4775  cl_mem_flags flags,
4776  cl_GLenum target,
4777  cl_GLint miplevel,
4778  cl_GLuint texobj,
4779  cl_int * err = NULL)
4780  {
4781  cl_int error;
4782  object_ = ::clCreateFromGLTexture2D(
4783  context(),
4784  flags,
4785  target,
4786  miplevel,
4787  texobj,
4788  &error);
4789 
4790  detail::errHandler(error, __CREATE_GL_TEXTURE_2D_ERR);
4791  if (err != NULL) {
4792  *err = error;
4793  }
4794 
4795  }
4796 
4798  Image2DGL() : Image2D() { }
4799 
4807  explicit Image2DGL(const cl_mem& image, bool retainObject = false) :
4808  Image2D(image, retainObject) { }
4809 
4814  Image2DGL& operator = (const cl_mem& rhs)
4815  {
4816  Image2D::operator=(rhs);
4817  return *this;
4818  }
4819 
4823  Image2DGL(const Image2DGL& img) : Image2D(img) {}
4824 
4829  {
4830  Image2D::operator=(img);
4831  return *this;
4832  }
4833 
4837  Image2DGL(Image2DGL&& img) CL_HPP_NOEXCEPT_ : Image2D(std::move(img)) {}
4838 
4843  {
4844  Image2D::operator=(std::move(img));
4845  return *this;
4846  }
4847 
4848 } CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
4849 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
4850 
4851 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4852 
4855 class Image2DArray : public Image
4856 {
4857 public:
4858  Image2DArray(
4859  const Context& context,
4860  cl_mem_flags flags,
4861  ImageFormat format,
4862  size_type arraySize,
4863  size_type width,
4864  size_type height,
4865  size_type rowPitch,
4866  size_type slicePitch,
4867  void* host_ptr = NULL,
4868  cl_int* err = NULL)
4869  {
4870  cl_int error;
4871  cl_image_desc desc =
4872  {
4873  CL_MEM_OBJECT_IMAGE2D_ARRAY,
4874  width,
4875  height,
4876  0, // depth (unused)
4877  arraySize,
4878  rowPitch,
4879  slicePitch,
4880  0, 0, 0
4881  };
4882  object_ = ::clCreateImage(
4883  context(),
4884  flags,
4885  &format,
4886  &desc,
4887  host_ptr,
4888  &error);
4889 
4890  detail::errHandler(error, __CREATE_IMAGE_ERR);
4891  if (err != NULL) {
4892  *err = error;
4893  }
4894  }
4895 
4896  Image2DArray() { }
4897 
4905  explicit Image2DArray(const cl_mem& imageArray, bool retainObject = false) : Image(imageArray, retainObject) { }
4906 
4907  Image2DArray& operator = (const cl_mem& rhs)
4908  {
4909  Image::operator=(rhs);
4910  return *this;
4911  }
4912 
4916  Image2DArray(const Image2DArray& img) : Image(img) {}
4917 
4922  {
4923  Image::operator=(img);
4924  return *this;
4925  }
4926 
4930  Image2DArray(Image2DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4931 
4936  {
4937  Image::operator=(std::move(img));
4938  return *this;
4939  }
4940 };
4941 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4942 
4949 class Image3D : public Image
4950 {
4951 public:
4957  const Context& context,
4958  cl_mem_flags flags,
4959  ImageFormat format,
4960  size_type width,
4961  size_type height,
4962  size_type depth,
4963  size_type row_pitch = 0,
4964  size_type slice_pitch = 0,
4965  void* host_ptr = NULL,
4966  cl_int* err = NULL)
4967  {
4968  cl_int error;
4969  bool useCreateImage;
4970 
4971 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4972  // Run-time decision based on the actual platform
4973  {
4974  cl_uint version = detail::getContextPlatformVersion(context());
4975  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4976  }
4977 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4978  useCreateImage = true;
4979 #else
4980  useCreateImage = false;
4981 #endif
4982 
4983 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4984  if (useCreateImage)
4985  {
4986  cl_image_desc desc =
4987  {
4988  CL_MEM_OBJECT_IMAGE3D,
4989  width,
4990  height,
4991  depth,
4992  0, // array size (unused)
4993  row_pitch,
4994  slice_pitch,
4995  0, 0, 0
4996  };
4997  object_ = ::clCreateImage(
4998  context(),
4999  flags,
5000  &format,
5001  &desc,
5002  host_ptr,
5003  &error);
5004 
5005  detail::errHandler(error, __CREATE_IMAGE_ERR);
5006  if (err != NULL) {
5007  *err = error;
5008  }
5009  }
5010 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5011 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
5012  if (!useCreateImage)
5013  {
5014  object_ = ::clCreateImage3D(
5015  context(), flags, &format, width, height, depth, row_pitch,
5016  slice_pitch, host_ptr, &error);
5017 
5018  detail::errHandler(error, __CREATE_IMAGE3D_ERR);
5019  if (err != NULL) {
5020  *err = error;
5021  }
5022  }
5023 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
5024  }
5025 
5027  Image3D() : Image() { }
5028 
5036  explicit Image3D(const cl_mem& image3D, bool retainObject = false) :
5037  Image(image3D, retainObject) { }
5038 
5043  Image3D& operator = (const cl_mem& rhs)
5044  {
5045  Image::operator=(rhs);
5046  return *this;
5047  }
5048 
5052  Image3D(const Image3D& img) : Image(img) {}
5053 
5058  {
5059  Image::operator=(img);
5060  return *this;
5061  }
5062 
5066  Image3D(Image3D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5067 
5072  {
5073  Image::operator=(std::move(img));
5074  return *this;
5075  }
5076 };
5077 
5078 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
5079 
5087 class Image3DGL : public Image3D
5088 {
5089 public:
5096  const Context& context,
5097  cl_mem_flags flags,
5098  cl_GLenum target,
5099  cl_GLint miplevel,
5100  cl_GLuint texobj,
5101  cl_int * err = NULL)
5102  {
5103  cl_int error;
5104  object_ = ::clCreateFromGLTexture3D(
5105  context(),
5106  flags,
5107  target,
5108  miplevel,
5109  texobj,
5110  &error);
5111 
5112  detail::errHandler(error, __CREATE_GL_TEXTURE_3D_ERR);
5113  if (err != NULL) {
5114  *err = error;
5115  }
5116  }
5117 
5119  Image3DGL() : Image3D() { }
5120 
5128  explicit Image3DGL(const cl_mem& image, bool retainObject = false) :
5129  Image3D(image, retainObject) { }
5130 
5135  Image3DGL& operator = (const cl_mem& rhs)
5136  {
5137  Image3D::operator=(rhs);
5138  return *this;
5139  }
5140 
5144  Image3DGL(const Image3DGL& img) : Image3D(img) {}
5145 
5150  {
5151  Image3D::operator=(img);
5152  return *this;
5153  }
5154 
5158  Image3DGL(Image3DGL&& img) CL_HPP_NOEXCEPT_ : Image3D(std::move(img)) {}
5159 
5164  {
5165  Image3D::operator=(std::move(img));
5166  return *this;
5167  }
5168 };
5169 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5170 
5171 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5172 
5178 class ImageGL : public Image
5179 {
5180 public:
5181  ImageGL(
5182  const Context& context,
5183  cl_mem_flags flags,
5184  cl_GLenum target,
5185  cl_GLint miplevel,
5186  cl_GLuint texobj,
5187  cl_int * err = NULL)
5188  {
5189  cl_int error;
5190  object_ = ::clCreateFromGLTexture(
5191  context(),
5192  flags,
5193  target,
5194  miplevel,
5195  texobj,
5196  &error);
5197 
5198  detail::errHandler(error, __CREATE_GL_TEXTURE_ERR);
5199  if (err != NULL) {
5200  *err = error;
5201  }
5202  }
5203 
5204  ImageGL() : Image() { }
5205 
5213  explicit ImageGL(const cl_mem& image, bool retainObject = false) :
5214  Image(image, retainObject) { }
5215 
5216  ImageGL& operator = (const cl_mem& rhs)
5217  {
5218  Image::operator=(rhs);
5219  return *this;
5220  }
5221 
5225  ImageGL(const ImageGL& img) : Image(img) {}
5226 
5231  {
5232  Image::operator=(img);
5233  return *this;
5234  }
5235 
5239  ImageGL(ImageGL&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5240 
5245  {
5246  Image::operator=(std::move(img));
5247  return *this;
5248  }
5249 };
5250 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5251 
5252 
5253 
5254 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5255 
5261 class Pipe : public Memory
5262 {
5263 public:
5264 
5275  const Context& context,
5276  cl_uint packet_size,
5277  cl_uint max_packets,
5278  cl_int* err = NULL)
5279  {
5280  cl_int error;
5281 
5282  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5283  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5284 
5285  detail::errHandler(error, __CREATE_PIPE_ERR);
5286  if (err != NULL) {
5287  *err = error;
5288  }
5289  }
5290 
5300  cl_uint packet_size,
5301  cl_uint max_packets,
5302  cl_int* err = NULL)
5303  {
5304  cl_int error;
5305 
5306  Context context = Context::getDefault(err);
5307 
5308  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5309  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5310 
5311  detail::errHandler(error, __CREATE_PIPE_ERR);
5312  if (err != NULL) {
5313  *err = error;
5314  }
5315  }
5316 
5318  Pipe() : Memory() { }
5319 
5327  explicit Pipe(const cl_mem& pipe, bool retainObject = false) :
5328  Memory(pipe, retainObject) { }
5329 
5334  Pipe& operator = (const cl_mem& rhs)
5335  {
5336  Memory::operator=(rhs);
5337  return *this;
5338  }
5339 
5343  Pipe(const Pipe& pipe) : Memory(pipe) {}
5344 
5348  Pipe& operator = (const Pipe &pipe)
5349  {
5350  Memory::operator=(pipe);
5351  return *this;
5352  }
5353 
5357  Pipe(Pipe&& pipe) CL_HPP_NOEXCEPT_ : Memory(std::move(pipe)) {}
5358 
5363  {
5364  Memory::operator=(std::move(pipe));
5365  return *this;
5366  }
5367 
5369  template <typename T>
5370  cl_int getInfo(cl_pipe_info name, T* param) const
5371  {
5372  return detail::errHandler(
5373  detail::getInfo(&::clGetPipeInfo, object_, name, param),
5374  __GET_PIPE_INFO_ERR);
5375  }
5376 
5378  template <cl_int name> typename
5380  getInfo(cl_int* err = NULL) const
5381  {
5382  typename detail::param_traits<
5383  detail::cl_pipe_info, name>::param_type param;
5384  cl_int result = getInfo(name, &param);
5385  if (err != NULL) {
5386  *err = result;
5387  }
5388  return param;
5389  }
5390 }; // class Pipe
5391 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
5392 
5393 
5402 class Sampler : public detail::Wrapper<cl_sampler>
5403 {
5404 public:
5406  Sampler() { }
5407 
5413  const Context& context,
5414  cl_bool normalized_coords,
5415  cl_addressing_mode addressing_mode,
5416  cl_filter_mode filter_mode,
5417  cl_int* err = NULL)
5418  {
5419  cl_int error;
5420 
5421 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5422  cl_sampler_properties sampler_properties[] = {
5423  CL_SAMPLER_NORMALIZED_COORDS, normalized_coords,
5424  CL_SAMPLER_ADDRESSING_MODE, addressing_mode,
5425  CL_SAMPLER_FILTER_MODE, filter_mode,
5426  0 };
5427  object_ = ::clCreateSamplerWithProperties(
5428  context(),
5429  sampler_properties,
5430  &error);
5431 
5432  detail::errHandler(error, __CREATE_SAMPLER_WITH_PROPERTIES_ERR);
5433  if (err != NULL) {
5434  *err = error;
5435  }
5436 #else
5437  object_ = ::clCreateSampler(
5438  context(),
5439  normalized_coords,
5440  addressing_mode,
5441  filter_mode,
5442  &error);
5443 
5444  detail::errHandler(error, __CREATE_SAMPLER_ERR);
5445  if (err != NULL) {
5446  *err = error;
5447  }
5448 #endif
5449  }
5450 
5459  explicit Sampler(const cl_sampler& sampler, bool retainObject = false) :
5460  detail::Wrapper<cl_type>(sampler, retainObject) { }
5461 
5467  Sampler& operator = (const cl_sampler& rhs)
5468  {
5470  return *this;
5471  }
5472 
5476  Sampler(const Sampler& sam) : detail::Wrapper<cl_type>(sam) {}
5477 
5482  {
5484  return *this;
5485  }
5486 
5490  Sampler(Sampler&& sam) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(sam)) {}
5491 
5496  {
5497  detail::Wrapper<cl_type>::operator=(std::move(sam));
5498  return *this;
5499  }
5500 
5502  template <typename T>
5503  cl_int getInfo(cl_sampler_info name, T* param) const
5504  {
5505  return detail::errHandler(
5506  detail::getInfo(&::clGetSamplerInfo, object_, name, param),
5507  __GET_SAMPLER_INFO_ERR);
5508  }
5509 
5511  template <cl_int name> typename
5513  getInfo(cl_int* err = NULL) const
5514  {
5515  typename detail::param_traits<
5516  detail::cl_sampler_info, name>::param_type param;
5517  cl_int result = getInfo(name, &param);
5518  if (err != NULL) {
5519  *err = result;
5520  }
5521  return param;
5522  }
5523 };
5524 
5525 class Program;
5526 class CommandQueue;
5527 class DeviceCommandQueue;
5528 class Kernel;
5529 
5531 class NDRange
5532 {
5533 private:
5534  size_type sizes_[3];
5535  cl_uint dimensions_;
5536 
5537 public:
5540  : dimensions_(0)
5541  {
5542  sizes_[0] = 0;
5543  sizes_[1] = 0;
5544  sizes_[2] = 0;
5545  }
5546 
5548  NDRange(size_type size0)
5549  : dimensions_(1)
5550  {
5551  sizes_[0] = size0;
5552  sizes_[1] = 1;
5553  sizes_[2] = 1;
5554  }
5555 
5557  NDRange(size_type size0, size_type size1)
5558  : dimensions_(2)
5559  {
5560  sizes_[0] = size0;
5561  sizes_[1] = size1;
5562  sizes_[2] = 1;
5563  }
5564 
5566  NDRange(size_type size0, size_type size1, size_type size2)
5567  : dimensions_(3)
5568  {
5569  sizes_[0] = size0;
5570  sizes_[1] = size1;
5571  sizes_[2] = size2;
5572  }
5573 
5578  operator const size_type*() const {
5579  return sizes_;
5580  }
5581 
5583  size_type dimensions() const
5584  {
5585  return dimensions_;
5586  }
5587 
5589  // runtime number of dimensions
5590  size_type size() const
5591  {
5592  return dimensions_*sizeof(size_type);
5593  }
5594 
5595  size_type* get()
5596  {
5597  return sizes_;
5598  }
5599 
5600  const size_type* get() const
5601  {
5602  return sizes_;
5603  }
5604 };
5605 
5607 static const NDRange NullRange;
5608 
5611 {
5612  size_type size_;
5613 };
5614 
5615 namespace detail {
5616 
5617 template <typename T, class Enable = void>
5619 
5620 // Enable for objects that are not subclasses of memory
5621 // Pointers, constants etc
5622 template <typename T>
5623 struct KernelArgumentHandler<T, typename std::enable_if<!std::is_base_of<cl::Memory, T>::value>::type>
5624 {
5625  static size_type size(const T&) { return sizeof(T); }
5626  static const T* ptr(const T& value) { return &value; }
5627 };
5628 
5629 // Enable for subclasses of memory where we want to get a reference to the cl_mem out
5630 // and pass that in for safety
5631 template <typename T>
5632 struct KernelArgumentHandler<T, typename std::enable_if<std::is_base_of<cl::Memory, T>::value>::type>
5633 {
5634  static size_type size(const T&) { return sizeof(cl_mem); }
5635  static const cl_mem* ptr(const T& value) { return &(value()); }
5636 };
5637 
5638 // Specialization for DeviceCommandQueue defined later
5639 
5640 template <>
5642 {
5643  static size_type size(const LocalSpaceArg& value) { return value.size_; }
5644  static const void* ptr(const LocalSpaceArg&) { return NULL; }
5645 };
5646 
5647 }
5649 
5653 inline LocalSpaceArg
5654 Local(size_type size)
5655 {
5656  LocalSpaceArg ret = { size };
5657  return ret;
5658 }
5659 
5668 class Kernel : public detail::Wrapper<cl_kernel>
5669 {
5670 public:
5671  inline Kernel(const Program& program, const char* name, cl_int* err = NULL);
5672 
5674  Kernel() { }
5675 
5684  explicit Kernel(const cl_kernel& kernel, bool retainObject = false) :
5685  detail::Wrapper<cl_type>(kernel, retainObject) { }
5686 
5692  Kernel& operator = (const cl_kernel& rhs)
5693  {
5695  return *this;
5696  }
5697 
5701  Kernel(const Kernel& kernel) : detail::Wrapper<cl_type>(kernel) {}
5702 
5706  Kernel& operator = (const Kernel &kernel)
5707  {
5709  return *this;
5710  }
5711 
5715  Kernel(Kernel&& kernel) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(kernel)) {}
5716 
5721  {
5722  detail::Wrapper<cl_type>::operator=(std::move(kernel));
5723  return *this;
5724  }
5725 
5726  template <typename T>
5727  cl_int getInfo(cl_kernel_info name, T* param) const
5728  {
5729  return detail::errHandler(
5730  detail::getInfo(&::clGetKernelInfo, object_, name, param),
5731  __GET_KERNEL_INFO_ERR);
5732  }
5733 
5734  template <cl_int name> typename
5735  detail::param_traits<detail::cl_kernel_info, name>::param_type
5736  getInfo(cl_int* err = NULL) const
5737  {
5738  typename detail::param_traits<
5739  detail::cl_kernel_info, name>::param_type param;
5740  cl_int result = getInfo(name, &param);
5741  if (err != NULL) {
5742  *err = result;
5743  }
5744  return param;
5745  }
5746 
5747 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5748  template <typename T>
5749  cl_int getArgInfo(cl_uint argIndex, cl_kernel_arg_info name, T* param) const
5750  {
5751  return detail::errHandler(
5752  detail::getInfo(&::clGetKernelArgInfo, object_, argIndex, name, param),
5753  __GET_KERNEL_ARG_INFO_ERR);
5754  }
5755 
5756  template <cl_int name> typename
5757  detail::param_traits<detail::cl_kernel_arg_info, name>::param_type
5758  getArgInfo(cl_uint argIndex, cl_int* err = NULL) const
5759  {
5760  typename detail::param_traits<
5761  detail::cl_kernel_arg_info, name>::param_type param;
5762  cl_int result = getArgInfo(argIndex, name, &param);
5763  if (err != NULL) {
5764  *err = result;
5765  }
5766  return param;
5767  }
5768 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5769 
5770  template <typename T>
5771  cl_int getWorkGroupInfo(
5772  const Device& device, cl_kernel_work_group_info name, T* param) const
5773  {
5774  return detail::errHandler(
5775  detail::getInfo(
5776  &::clGetKernelWorkGroupInfo, object_, device(), name, param),
5777  __GET_KERNEL_WORK_GROUP_INFO_ERR);
5778  }
5779 
5780  template <cl_int name> typename
5781  detail::param_traits<detail::cl_kernel_work_group_info, name>::param_type
5782  getWorkGroupInfo(const Device& device, cl_int* err = NULL) const
5783  {
5784  typename detail::param_traits<
5785  detail::cl_kernel_work_group_info, name>::param_type param;
5786  cl_int result = getWorkGroupInfo(device, name, &param);
5787  if (err != NULL) {
5788  *err = result;
5789  }
5790  return param;
5791  }
5792 
5793 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5794 #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
    //! \brief Queries a sub-group property of this kernel for the given
    //! device and ND-range via the cl_khr_subgroups extension entry point.
    //!
    //! The extension function pointer is resolved once and cached in a
    //! function-local static by CL_HPP_INIT_CL_EXT_FCN_PTR_.
    // NOTE(review): failures are reported with __GET_KERNEL_ARG_INFO_ERR,
    // which names clGetKernelArgInfo rather than clGetKernelSubGroupInfoKHR —
    // looks like a copy/paste of the wrong message macro; confirm upstream.
    cl_int getSubGroupInfo(const cl::Device &dev, cl_kernel_sub_group_info name, const cl::NDRange &range, size_type* param) const
    {
        typedef clGetKernelSubGroupInfoKHR_fn PFN_clGetKernelSubGroupInfoKHR;
        static PFN_clGetKernelSubGroupInfoKHR pfn_clGetKernelSubGroupInfoKHR = NULL;
        CL_HPP_INIT_CL_EXT_FCN_PTR_(clGetKernelSubGroupInfoKHR);

        // Exactly sizeof(size_type) bytes are written into *param.
        return detail::errHandler(
            pfn_clGetKernelSubGroupInfoKHR(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
            __GET_KERNEL_ARG_INFO_ERR);
    }
5805 
5806  template <cl_int name>
5807  size_type getSubGroupInfo(const cl::Device &dev, const cl::NDRange &range, cl_int* err = NULL) const
5808  {
5809  size_type param;
5810  cl_int result = getSubGroupInfo(dev, name, range, &param);
5811  if (err != NULL) {
5812  *err = result;
5813  }
5814  return param;
5815  }
5816 #endif // #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
5817 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5818 
5819 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5820 
5822  template<typename T, class D>
5823  cl_int setArg(cl_uint index, const cl::pointer<T, D> &argPtr)
5824  {
5825  return detail::errHandler(
5826  ::clSetKernelArgSVMPointer(object_, index, argPtr.get()),
5827  __SET_KERNEL_ARGS_ERR);
5828  }
5829 
5832  template<typename T, class Alloc>
5833  cl_int setArg(cl_uint index, const cl::vector<T, Alloc> &argPtr)
5834  {
5835  return detail::errHandler(
5836  ::clSetKernelArgSVMPointer(object_, index, argPtr.data()),
5837  __SET_KERNEL_ARGS_ERR);
5838  }
5839 
5842  template<typename T>
5843  typename std::enable_if<std::is_pointer<T>::value, cl_int>::type
5844  setArg(cl_uint index, const T argPtr)
5845  {
5846  return detail::errHandler(
5847  ::clSetKernelArgSVMPointer(object_, index, argPtr),
5848  __SET_KERNEL_ARGS_ERR);
5849  }
5850 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5851 
5854  template <typename T>
5855  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
5856  setArg(cl_uint index, const T &value)
5857  {
5858  return detail::errHandler(
5859  ::clSetKernelArg(
5860  object_,
5861  index,
5864  __SET_KERNEL_ARGS_ERR);
5865  }
5866 
5867  cl_int setArg(cl_uint index, size_type size, const void* argPtr)
5868  {
5869  return detail::errHandler(
5870  ::clSetKernelArg(object_, index, size, argPtr),
5871  __SET_KERNEL_ARGS_ERR);
5872  }
5873 
5874 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5875 
5879  cl_int setSVMPointers(const vector<void*> &pointerList)
5880  {
5881  return detail::errHandler(
5882  ::clSetKernelExecInfo(
5883  object_,
5884  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5885  sizeof(void*)*pointerList.size(),
5886  pointerList.data()));
5887  }
5888 
5893  template<int ArrayLength>
5894  cl_int setSVMPointers(const std::array<void*, ArrayLength> &pointerList)
5895  {
5896  return detail::errHandler(
5897  ::clSetKernelExecInfo(
5898  object_,
5899  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5900  sizeof(void*)*pointerList.size(),
5901  pointerList.data()));
5902  }
5903 
5915  cl_int enableFineGrainedSystemSVM(bool svmEnabled)
5916  {
5917  cl_bool svmEnabled_ = svmEnabled ? CL_TRUE : CL_FALSE;
5918  return detail::errHandler(
5919  ::clSetKernelExecInfo(
5920  object_,
5921  CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM,
5922  sizeof(cl_bool),
5923  &svmEnabled_
5924  )
5925  );
5926  }
5927 
5928  template<int index, int ArrayLength, class D, typename T0, typename... Ts>
5929  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0, Ts... ts)
5930  {
5931  pointerList[index] = static_cast<void*>(t0.get());
5932  setSVMPointersHelper<index + 1, Ts...>(ts...);
5933  }
5934 
5935  template<int index, int ArrayLength, typename T0, typename... Ts>
5936  typename std::enable_if<std::is_pointer<T0>::value, void>::type
5937  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0, Ts... ts)
5938  {
5939  pointerList[index] = static_cast<void*>(t0);
5940  setSVMPointersHelper<index + 1, Ts...>(ts...);
5941  }
5942 
5943  template<int index, int ArrayLength, typename T0, class D>
5944  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0)
5945  {
5946  pointerList[index] = static_cast<void*>(t0.get());
5947  }
5948 
5949  template<int index, int ArrayLength, typename T0>
5950  typename std::enable_if<std::is_pointer<T0>::value, void>::type
5951  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0)
5952  {
5953  pointerList[index] = static_cast<void*>(t0);
5954  }
5955 
5956  template<typename T0, typename... Ts>
5957  cl_int setSVMPointers(const T0 &t0, Ts... ts)
5958  {
5959  std::array<void*, 1 + sizeof...(Ts)> pointerList;
5960 
5961  setSVMPointersHelper<0, 1 + sizeof...(Ts)>(pointerList, t0, ts...);
5962  return detail::errHandler(
5963  ::clSetKernelExecInfo(
5964  object_,
5965  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5966  sizeof(void*)*(1 + sizeof...(Ts)),
5967  pointerList.data()));
5968  }
5969 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5970 };
5971 
5975 class Program : public detail::Wrapper<cl_program>
5976 {
5977 public:
5978 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5979  typedef vector<vector<unsigned char>> Binaries;
5980  typedef vector<string> Sources;
5981 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5982  typedef vector<std::pair<const void*, size_type> > Binaries;
5983  typedef vector<std::pair<const char*, size_type> > Sources;
5984 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5985 
5986  Program(
5987  const string& source,
5988  bool build = false,
5989  cl_int* err = NULL)
5990  {
5991  cl_int error;
5992 
5993  const char * strings = source.c_str();
5994  const size_type length = source.size();
5995 
5996  Context context = Context::getDefault(err);
5997 
5998  object_ = ::clCreateProgramWithSource(
5999  context(), (cl_uint)1, &strings, &length, &error);
6000 
6001  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6002 
6003  if (error == CL_SUCCESS && build) {
6004 
6005  error = ::clBuildProgram(
6006  object_,
6007  0,
6008  NULL,
6009 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6010  "-cl-std=CL2.0",
6011 #else
6012  "",
6013 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6014  NULL,
6015  NULL);
6016 
6017  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6018  }
6019 
6020  if (err != NULL) {
6021  *err = error;
6022  }
6023  }
6024 
6025  Program(
6026  const Context& context,
6027  const string& source,
6028  bool build = false,
6029  cl_int* err = NULL)
6030  {
6031  cl_int error;
6032 
6033  const char * strings = source.c_str();
6034  const size_type length = source.size();
6035 
6036  object_ = ::clCreateProgramWithSource(
6037  context(), (cl_uint)1, &strings, &length, &error);
6038 
6039  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6040 
6041  if (error == CL_SUCCESS && build) {
6042  error = ::clBuildProgram(
6043  object_,
6044  0,
6045  NULL,
6046 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6047  "-cl-std=CL2.0",
6048 #else
6049  "",
6050 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6051  NULL,
6052  NULL);
6053 
6054  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6055  }
6056 
6057  if (err != NULL) {
6058  *err = error;
6059  }
6060  }
6061 
6067  const Sources& sources,
6068  cl_int* err = NULL)
6069  {
6070  cl_int error;
6071  Context context = Context::getDefault(err);
6072 
6073  const size_type n = (size_type)sources.size();
6074 
6075  vector<size_type> lengths(n);
6076  vector<const char*> strings(n);
6077 
6078  for (size_type i = 0; i < n; ++i) {
6079 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6080  strings[i] = sources[(int)i].data();
6081  lengths[i] = sources[(int)i].length();
6082 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6083  strings[i] = sources[(int)i].first;
6084  lengths[i] = sources[(int)i].second;
6085 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6086  }
6087 
6088  object_ = ::clCreateProgramWithSource(
6089  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6090 
6091  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6092  if (err != NULL) {
6093  *err = error;
6094  }
6095  }
6096 
6102  const Context& context,
6103  const Sources& sources,
6104  cl_int* err = NULL)
6105  {
6106  cl_int error;
6107 
6108  const size_type n = (size_type)sources.size();
6109 
6110  vector<size_type> lengths(n);
6111  vector<const char*> strings(n);
6112 
6113  for (size_type i = 0; i < n; ++i) {
6114 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6115  strings[i] = sources[(int)i].data();
6116  lengths[i] = sources[(int)i].length();
6117 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6118  strings[i] = sources[(int)i].first;
6119  lengths[i] = sources[(int)i].second;
6120 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6121  }
6122 
6123  object_ = ::clCreateProgramWithSource(
6124  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6125 
6126  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6127  if (err != NULL) {
6128  *err = error;
6129  }
6130  }
6131 
6152  const Context& context,
6153  const vector<Device>& devices,
6154  const Binaries& binaries,
6155  vector<cl_int>* binaryStatus = NULL,
6156  cl_int* err = NULL)
6157  {
6158  cl_int error;
6159 
6160  const size_type numDevices = devices.size();
6161 
6162  // Catch size mismatch early and return
6163  if(binaries.size() != numDevices) {
6164  error = CL_INVALID_VALUE;
6165  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6166  if (err != NULL) {
6167  *err = error;
6168  }
6169  return;
6170  }
6171 
6172 
6173  vector<size_type> lengths(numDevices);
6174  vector<const unsigned char*> images(numDevices);
6175 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6176  for (size_type i = 0; i < numDevices; ++i) {
6177  images[i] = binaries[i].data();
6178  lengths[i] = binaries[(int)i].size();
6179  }
6180 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6181  for (size_type i = 0; i < numDevices; ++i) {
6182  images[i] = (const unsigned char*)binaries[i].first;
6183  lengths[i] = binaries[(int)i].second;
6184  }
6185 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6186 
6187  vector<cl_device_id> deviceIDs(numDevices);
6188  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6189  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6190  }
6191 
6192  if(binaryStatus) {
6193  binaryStatus->resize(numDevices);
6194  }
6195 
6196  object_ = ::clCreateProgramWithBinary(
6197  context(), (cl_uint) devices.size(),
6198  deviceIDs.data(),
6199  lengths.data(), images.data(), (binaryStatus != NULL && numDevices > 0)
6200  ? &binaryStatus->front()
6201  : NULL, &error);
6202 
6203  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6204  if (err != NULL) {
6205  *err = error;
6206  }
6207  }
6208 
6209 
6210 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6211 
6216  const Context& context,
6217  const vector<Device>& devices,
6218  const string& kernelNames,
6219  cl_int* err = NULL)
6220  {
6221  cl_int error;
6222 
6223 
6224  size_type numDevices = devices.size();
6225  vector<cl_device_id> deviceIDs(numDevices);
6226  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6227  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6228  }
6229 
6230  object_ = ::clCreateProgramWithBuiltInKernels(
6231  context(),
6232  (cl_uint) devices.size(),
6233  deviceIDs.data(),
6234  kernelNames.c_str(),
6235  &error);
6236 
6237  detail::errHandler(error, __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR);
6238  if (err != NULL) {
6239  *err = error;
6240  }
6241  }
6242 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6243 
6244  Program() { }
6245 
6246 
6253  explicit Program(const cl_program& program, bool retainObject = false) :
6254  detail::Wrapper<cl_type>(program, retainObject) { }
6255 
6256  Program& operator = (const cl_program& rhs)
6257  {
6259  return *this;
6260  }
6261 
6265  Program(const Program& program) : detail::Wrapper<cl_type>(program) {}
6266 
6270  Program& operator = (const Program &program)
6271  {
6273  return *this;
6274  }
6275 
6279  Program(Program&& program) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(program)) {}
6280 
6285  {
6286  detail::Wrapper<cl_type>::operator=(std::move(program));
6287  return *this;
6288  }
6289 
6290  cl_int build(
6291  const vector<Device>& devices,
6292  const char* options = NULL,
6293  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6294  void* data = NULL) const
6295  {
6296  size_type numDevices = devices.size();
6297  vector<cl_device_id> deviceIDs(numDevices);
6298 
6299  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6300  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6301  }
6302 
6303  cl_int buildError = ::clBuildProgram(
6304  object_,
6305  (cl_uint)
6306  devices.size(),
6307  deviceIDs.data(),
6308  options,
6309  notifyFptr,
6310  data);
6311 
6312  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6313  }
6314 
6315  cl_int build(
6316  const char* options = NULL,
6317  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6318  void* data = NULL) const
6319  {
6320  cl_int buildError = ::clBuildProgram(
6321  object_,
6322  0,
6323  NULL,
6324  options,
6325  notifyFptr,
6326  data);
6327 
6328 
6329  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6330  }
6331 
6332 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6333  cl_int compile(
6334  const char* options = NULL,
6335  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6336  void* data = NULL) const
6337  {
6338  cl_int error = ::clCompileProgram(
6339  object_,
6340  0,
6341  NULL,
6342  options,
6343  0,
6344  NULL,
6345  NULL,
6346  notifyFptr,
6347  data);
6348  return detail::buildErrHandler(error, __COMPILE_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6349  }
6350 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6351 
6352  template <typename T>
6353  cl_int getInfo(cl_program_info name, T* param) const
6354  {
6355  return detail::errHandler(
6356  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6357  __GET_PROGRAM_INFO_ERR);
6358  }
6359 
6360  template <cl_int name> typename
6361  detail::param_traits<detail::cl_program_info, name>::param_type
6362  getInfo(cl_int* err = NULL) const
6363  {
6364  typename detail::param_traits<
6365  detail::cl_program_info, name>::param_type param;
6366  cl_int result = getInfo(name, &param);
6367  if (err != NULL) {
6368  *err = result;
6369  }
6370  return param;
6371  }
6372 
6373  template <typename T>
6374  cl_int getBuildInfo(
6375  const Device& device, cl_program_build_info name, T* param) const
6376  {
6377  return detail::errHandler(
6378  detail::getInfo(
6379  &::clGetProgramBuildInfo, object_, device(), name, param),
6380  __GET_PROGRAM_BUILD_INFO_ERR);
6381  }
6382 
6383  template <cl_int name> typename
6384  detail::param_traits<detail::cl_program_build_info, name>::param_type
6385  getBuildInfo(const Device& device, cl_int* err = NULL) const
6386  {
6387  typename detail::param_traits<
6388  detail::cl_program_build_info, name>::param_type param;
6389  cl_int result = getBuildInfo(device, name, &param);
6390  if (err != NULL) {
6391  *err = result;
6392  }
6393  return param;
6394  }
6395 
6401  template <cl_int name>
6402  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6403  getBuildInfo(cl_int *err = NULL) const
6404  {
6405  cl_int result = CL_SUCCESS;
6406 
6407  auto devs = getInfo<CL_PROGRAM_DEVICES>(&result);
6408  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6409  devInfo;
6410 
6411  // If there was an initial error from getInfo return the error
6412  if (result != CL_SUCCESS) {
6413  if (err != NULL) {
6414  *err = result;
6415  }
6416  return devInfo;
6417  }
6418 
6419  for (cl::Device d : devs) {
6420  typename detail::param_traits<
6421  detail::cl_program_build_info, name>::param_type param;
6422  result = getBuildInfo(d, name, &param);
6423  devInfo.push_back(
6425  (d, param));
6426  if (result != CL_SUCCESS) {
6427  // On error, leave the loop and return the error code
6428  break;
6429  }
6430  }
6431  if (err != NULL) {
6432  *err = result;
6433  }
6434  if (result != CL_SUCCESS) {
6435  devInfo.clear();
6436  }
6437  return devInfo;
6438  }
6439 
6440  cl_int createKernels(vector<Kernel>* kernels)
6441  {
6442  cl_uint numKernels;
6443  cl_int err = ::clCreateKernelsInProgram(object_, 0, NULL, &numKernels);
6444  if (err != CL_SUCCESS) {
6445  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6446  }
6447 
6448  vector<cl_kernel> value(numKernels);
6449 
6450  err = ::clCreateKernelsInProgram(
6451  object_, numKernels, value.data(), NULL);
6452  if (err != CL_SUCCESS) {
6453  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6454  }
6455 
6456  if (kernels) {
6457  kernels->resize(value.size());
6458 
6459  // Assign to param, constructing with retain behaviour
6460  // to correctly capture each underlying CL object
6461  for (size_type i = 0; i < value.size(); i++) {
6462  // We do not need to retain because this kernel is being created
6463  // by the runtime
6464  (*kernels)[i] = Kernel(value[i], false);
6465  }
6466  }
6467  return CL_SUCCESS;
6468  }
6469 };
6470 
6471 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6472 inline Program linkProgram(
6473  Program input1,
6474  Program input2,
6475  const char* options = NULL,
6476  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6477  void* data = NULL,
6478  cl_int* err = NULL)
6479 {
6480  cl_int error_local = CL_SUCCESS;
6481 
6482  cl_program programs[2] = { input1(), input2() };
6483 
6484  Context ctx = input1.getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6485  if(error_local!=CL_SUCCESS) {
6486  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6487  }
6488 
6489  cl_program prog = ::clLinkProgram(
6490  ctx(),
6491  0,
6492  NULL,
6493  options,
6494  2,
6495  programs,
6496  notifyFptr,
6497  data,
6498  &error_local);
6499 
6500  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6501  if (err != NULL) {
6502  *err = error_local;
6503  }
6504 
6505  return Program(prog);
6506 }
6507 
6508 inline Program linkProgram(
6509  vector<Program> inputPrograms,
6510  const char* options = NULL,
6511  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6512  void* data = NULL,
6513  cl_int* err = NULL)
6514 {
6515  cl_int error_local = CL_SUCCESS;
6516 
6517  vector<cl_program> programs(inputPrograms.size());
6518 
6519  for (unsigned int i = 0; i < inputPrograms.size(); i++) {
6520  programs[i] = inputPrograms[i]();
6521  }
6522 
6523  Context ctx;
6524  if(inputPrograms.size() > 0) {
6525  ctx = inputPrograms[0].getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6526  if(error_local!=CL_SUCCESS) {
6527  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6528  }
6529  }
6530  cl_program prog = ::clLinkProgram(
6531  ctx(),
6532  0,
6533  NULL,
6534  options,
6535  (cl_uint)inputPrograms.size(),
6536  programs.data(),
6537  notifyFptr,
6538  data,
6539  &error_local);
6540 
6541  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6542  if (err != NULL) {
6543  *err = error_local;
6544  }
6545 
6546  return Program(prog, false);
6547 }
6548 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6549 
6550 // Template specialization for CL_PROGRAM_BINARIES
6551 template <>
6552 inline cl_int cl::Program::getInfo(cl_program_info name, vector<vector<unsigned char>>* param) const
6553 {
6554  if (name != CL_PROGRAM_BINARIES) {
6555  return CL_INVALID_VALUE;
6556  }
6557  if (param) {
6558  // Resize the parameter array appropriately for each allocation
6559  // and pass down to the helper
6560 
6561  vector<size_type> sizes = getInfo<CL_PROGRAM_BINARY_SIZES>();
6562  size_type numBinaries = sizes.size();
6563 
6564  // Resize the parameter array and constituent arrays
6565  param->resize(numBinaries);
6566  for (size_type i = 0; i < numBinaries; ++i) {
6567  (*param)[i].resize(sizes[i]);
6568  }
6569 
6570  return detail::errHandler(
6571  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6572  __GET_PROGRAM_INFO_ERR);
6573  }
6574 
6575  return CL_SUCCESS;
6576 }
6577 
6578 template<>
6579 inline vector<vector<unsigned char>> cl::Program::getInfo<CL_PROGRAM_BINARIES>(cl_int* err) const
6580 {
6581  vector<vector<unsigned char>> binariesVectors;
6582 
6583  cl_int result = getInfo(CL_PROGRAM_BINARIES, &binariesVectors);
6584  if (err != NULL) {
6585  *err = result;
6586  }
6587  return binariesVectors;
6588 }
6589 
6590 inline Kernel::Kernel(const Program& program, const char* name, cl_int* err)
6591 {
6592  cl_int error;
6593 
6594  object_ = ::clCreateKernel(program(), name, &error);
6595  detail::errHandler(error, __CREATE_KERNEL_ERR);
6596 
6597  if (err != NULL) {
6598  *err = error;
6599  }
6600 
6601 }
6602 
// Strongly-typed command-queue property flags; combine with the operator|
// overload defined below.
enum class QueueProperties : cl_command_queue_properties
{
    None = 0,
    Profiling = CL_QUEUE_PROFILING_ENABLE,
    OutOfOrder = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE,
};
6609 
6610 inline QueueProperties operator|(QueueProperties lhs, QueueProperties rhs)
6611 {
6612  return static_cast<QueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
6613 }
6614 
6618 class CommandQueue : public detail::Wrapper<cl_command_queue>
6619 {
private:
    static std::once_flag default_initialized_; // guards one-time creation of the default queue
    static CommandQueue default_;               // process-wide default queue
    static cl_int default_error_;               // error captured while creating the default queue
6624 
    /*! \brief Creates the process-wide default command queue on first use.
     *
     *  Runs under std::call_once from getDefault().  Must not throw: with
     *  exceptions enabled any cl::Error is caught and its code stored in
     *  default_error_ for later reporting.
     */
    static void makeDefault()
    {
        /* We don't want to throw an error from this function, so we have to
         * catch and set the error flag.
         */
#if defined(CL_HPP_ENABLE_EXCEPTIONS)
        try
#endif
        {
            int error;
            Context context = Context::getDefault(&error);

            if (error != CL_SUCCESS) {
                // Propagate the context-creation failure to getDefault().
                default_error_ = error;
            }
            else {
                Device device = Device::getDefault();
                // Plain in-order queue; default_error_ receives the result.
                default_ = CommandQueue(context, device, 0, &default_error_);
            }
        }
#if defined(CL_HPP_ENABLE_EXCEPTIONS)
        catch (cl::Error &e) {
            default_error_ = e.err();
        }
#endif
    }
6656 
    /*! \brief Installs a caller-supplied queue as the default.
     *  Runs under std::call_once from setDefault().
     */
    static void makeDefaultProvided(const CommandQueue &c) {
        default_ = c;
    }
6665 
6666 public:
6667 #ifdef CL_HPP_UNIT_TEST_ENABLE
6668 
    //! \brief Test hook: resets the cached default queue to an empty one.
    //! Only compiled when CL_HPP_UNIT_TEST_ENABLE is defined.
    static void unitTestClearDefault() {
        default_ = CommandQueue();
    }
6677 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
6678 
6679 
6685  cl_command_queue_properties properties,
6686  cl_int* err = NULL)
6687  {
6688  cl_int error;
6689 
6690  Context context = Context::getDefault(&error);
6691  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6692 
6693  if (error != CL_SUCCESS) {
6694  if (err != NULL) {
6695  *err = error;
6696  }
6697  }
6698  else {
6699  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
6700 
6701 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6702  cl_queue_properties queue_properties[] = {
6703  CL_QUEUE_PROPERTIES, properties, 0 };
6704  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
6705  object_ = ::clCreateCommandQueueWithProperties(
6706  context(), device(), queue_properties, &error);
6707  }
6708  else {
6709  error = CL_INVALID_QUEUE_PROPERTIES;
6710  }
6711 
6712  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6713  if (err != NULL) {
6714  *err = error;
6715  }
6716 #else
6717  object_ = ::clCreateCommandQueue(
6718  context(), device(), properties, &error);
6719 
6720  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6721  if (err != NULL) {
6722  *err = error;
6723  }
6724 #endif
6725  }
6726  }
6727 
6733  QueueProperties properties,
6734  cl_int* err = NULL)
6735  {
6736  cl_int error;
6737 
6738  Context context = Context::getDefault(&error);
6739  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6740 
6741  if (error != CL_SUCCESS) {
6742  if (err != NULL) {
6743  *err = error;
6744  }
6745  }
6746  else {
6747  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
6748 
6749 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6750  cl_queue_properties queue_properties[] = {
6751  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
6752 
6753  object_ = ::clCreateCommandQueueWithProperties(
6754  context(), device(), queue_properties, &error);
6755 
6756 
6757  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6758  if (err != NULL) {
6759  *err = error;
6760  }
6761 #else
6762  object_ = ::clCreateCommandQueue(
6763  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
6764 
6765  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6766  if (err != NULL) {
6767  *err = error;
6768  }
6769 #endif
6770  }
6771  }
6772 
    /*! \brief Constructs a CommandQueue on the first device of the supplied
     *  context.
     *
     *  On OpenCL 2.0+ returns CL_INVALID_QUEUE_PROPERTIES when
     *  CL_QUEUE_ON_DEVICE is requested.
     */
    explicit CommandQueue(
        const Context& context,
        cl_command_queue_properties properties = 0,
        cl_int* err = NULL)
    {
        cl_int error;
        vector<cl::Device> devices;
        error = context.getInfo(CL_CONTEXT_DEVICES, &devices);

        detail::errHandler(error, __CREATE_CONTEXT_ERR);

        if (error != CL_SUCCESS)
        {
            if (err != NULL) {
                *err = error;
            }
            return;
        }

#if CL_HPP_TARGET_OPENCL_VERSION >= 200
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, properties, 0 };
        if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
            object_ = ::clCreateCommandQueueWithProperties(
                context(), devices[0](), queue_properties, &error);
        }
        else {
            error = CL_INVALID_QUEUE_PROPERTIES;
        }

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }
#else
        object_ = ::clCreateCommandQueue(
            context(), devices[0](), properties, &error);

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
        if (err != NULL) {
            *err = error;
        }
#endif

    }
6822 
    /*! \brief Constructs a CommandQueue on the first device of the supplied
     *  context, from strongly-typed QueueProperties.
     */
    explicit CommandQueue(
        const Context& context,
        QueueProperties properties,
        cl_int* err = NULL)
    {
        cl_int error;
        vector<cl::Device> devices;
        error = context.getInfo(CL_CONTEXT_DEVICES, &devices);

        detail::errHandler(error, __CREATE_CONTEXT_ERR);

        if (error != CL_SUCCESS)
        {
            if (err != NULL) {
                *err = error;
            }
            return;
        }

#if CL_HPP_TARGET_OPENCL_VERSION >= 200
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
        object_ = ::clCreateCommandQueueWithProperties(
            context(), devices[0](), queue_properties, &error);

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }
#else
        object_ = ::clCreateCommandQueue(
            context(), devices[0](), static_cast<cl_command_queue_properties>(properties), &error);

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
        if (err != NULL) {
            *err = error;
        }
#endif

    }
6867 
6873  const Context& context,
6874  const Device& device,
6875  cl_command_queue_properties properties = 0,
6876  cl_int* err = NULL)
6877  {
6878  cl_int error;
6879 
6880 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6881  cl_queue_properties queue_properties[] = {
6882  CL_QUEUE_PROPERTIES, properties, 0 };
6883  object_ = ::clCreateCommandQueueWithProperties(
6884  context(), device(), queue_properties, &error);
6885 
6886  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6887  if (err != NULL) {
6888  *err = error;
6889  }
6890 #else
6891  object_ = ::clCreateCommandQueue(
6892  context(), device(), properties, &error);
6893 
6894  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6895  if (err != NULL) {
6896  *err = error;
6897  }
6898 #endif
6899  }
6900 
6906  const Context& context,
6907  const Device& device,
6908  QueueProperties properties,
6909  cl_int* err = NULL)
6910  {
6911  cl_int error;
6912 
6913 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6914  cl_queue_properties queue_properties[] = {
6915  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
6916  object_ = ::clCreateCommandQueueWithProperties(
6917  context(), device(), queue_properties, &error);
6918 
6919  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6920  if (err != NULL) {
6921  *err = error;
6922  }
6923 #else
6924  object_ = ::clCreateCommandQueue(
6925  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
6926 
6927  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6928  if (err != NULL) {
6929  *err = error;
6930  }
6931 #endif
6932  }
6933 
    /*! \brief Returns the process-wide default queue, creating it on first
     *  call (thread-safe via std::call_once).  Any creation error is
     *  re-reported on every call through errHandler and *err.
     */
    static CommandQueue getDefault(cl_int * err = NULL)
    {
        std::call_once(default_initialized_, makeDefault);
#if CL_HPP_TARGET_OPENCL_VERSION >= 200
        detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
#else // CL_HPP_TARGET_OPENCL_VERSION >= 200
        detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_ERR);
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
        if (err != NULL) {
            *err = default_error_;
        }
        return default_;
    }
6947 
    /*! \brief Installs \p default_queue as the process-wide default.
     *
     *  Only takes effect if no default has been materialized yet (call_once);
     *  the returned value is whatever default queue is actually in place
     *  afterwards.
     */
    static CommandQueue setDefault(const CommandQueue &default_queue)
    {
        std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_queue));
        detail::errHandler(default_error_);
        return default_;
    }
6961 
6962  CommandQueue() { }
6963 
6964 
6971  explicit CommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
6972  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
6973 
6974  CommandQueue& operator = (const cl_command_queue& rhs)
6975  {
6977  return *this;
6978  }
6979 
6983  CommandQueue(const CommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
6984 
6989  {
6991  return *this;
6992  }
6993 
6997  CommandQueue(CommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
6998 
7003  {
7004  detail::Wrapper<cl_type>::operator=(std::move(queue));
7005  return *this;
7006  }
7007 
7008  template <typename T>
7009  cl_int getInfo(cl_command_queue_info name, T* param) const
7010  {
7011  return detail::errHandler(
7012  detail::getInfo(
7013  &::clGetCommandQueueInfo, object_, name, param),
7014  __GET_COMMAND_QUEUE_INFO_ERR);
7015  }
7016 
7017  template <cl_int name> typename
7018  detail::param_traits<detail::cl_command_queue_info, name>::param_type
7019  getInfo(cl_int* err = NULL) const
7020  {
7021  typename detail::param_traits<
7022  detail::cl_command_queue_info, name>::param_type param;
7023  cl_int result = getInfo(name, &param);
7024  if (err != NULL) {
7025  *err = result;
7026  }
7027  return param;
7028  }
7029 
    /*! \brief Enqueue a read from a buffer object into host memory.
     *
     *  \param buffer   source buffer.
     *  \param blocking CL_TRUE to block until the read has completed.
     *  \param offset   byte offset into the buffer.
     *  \param size     number of bytes to read.
     *  \param ptr      host destination pointer.
     *  \param events   optional wait list the command waits on.
     *  \param event    optional out-parameter receiving this command's event.
     *  \return CL_SUCCESS or an OpenCL error code (via detail::errHandler).
     */
    cl_int enqueueReadBuffer(
        const Buffer& buffer,
        cl_bool blocking,
        size_type offset,
        size_type size,
        void* ptr,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueReadBuffer(
                object_, buffer(), blocking, offset, size,
                ptr,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_READ_BUFFER_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7054 
    /*! \brief Enqueue a write from host memory into a buffer object.
     *
     *  Mirrors enqueueReadBuffer with the data direction reversed; \a ptr is
     *  the host source. Returns CL_SUCCESS or an error code via errHandler.
     */
    cl_int enqueueWriteBuffer(
        const Buffer& buffer,
        cl_bool blocking,
        size_type offset,
        size_type size,
        const void* ptr,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueWriteBuffer(
                object_, buffer(), blocking, offset, size,
                ptr,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_WRITE_BUFFER_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7079 
    /*! \brief Enqueue a device-side copy of \a size bytes between two buffers.
     *
     *  Non-blocking by nature; use \a event or the wait list for ordering.
     *  (The macro name __ENQEUE_COPY_BUFFER_ERR is spelled that way where it
     *  is defined earlier in this file - do not "correct" it here.)
     */
    cl_int enqueueCopyBuffer(
        const Buffer& src,
        const Buffer& dst,
        size_type src_offset,
        size_type dst_offset,
        size_type size,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueCopyBuffer(
                object_, src(), dst(), src_offset, dst_offset, size,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQEUE_COPY_BUFFER_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7103 
    /*! \brief Enqueue a rectangular (2D/3D) read from a buffer into host memory.
     *
     *  Origins and region are (x, y, z) triples in bytes/rows/slices; a pitch
     *  of 0 lets the runtime infer it from the region (per the OpenCL spec).
     */
    cl_int enqueueReadBufferRect(
        const Buffer& buffer,
        cl_bool blocking,
        const array<size_type, 3>& buffer_offset,
        const array<size_type, 3>& host_offset,
        const array<size_type, 3>& region,
        size_type buffer_row_pitch,
        size_type buffer_slice_pitch,
        size_type host_row_pitch,
        size_type host_slice_pitch,
        void *ptr,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueReadBufferRect(
                object_,
                buffer(),
                blocking,
                buffer_offset.data(),
                host_offset.data(),
                region.data(),
                buffer_row_pitch,
                buffer_slice_pitch,
                host_row_pitch,
                host_slice_pitch,
                ptr,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_READ_BUFFER_RECT_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7142 
    /*! \brief Enqueue a rectangular (2D/3D) write from host memory into a buffer.
     *
     *  Mirror of enqueueReadBufferRect with \a ptr as the host source.
     */
    cl_int enqueueWriteBufferRect(
        const Buffer& buffer,
        cl_bool blocking,
        const array<size_type, 3>& buffer_offset,
        const array<size_type, 3>& host_offset,
        const array<size_type, 3>& region,
        size_type buffer_row_pitch,
        size_type buffer_slice_pitch,
        size_type host_row_pitch,
        size_type host_slice_pitch,
        const void *ptr,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueWriteBufferRect(
                object_,
                buffer(),
                blocking,
                buffer_offset.data(),
                host_offset.data(),
                region.data(),
                buffer_row_pitch,
                buffer_slice_pitch,
                host_row_pitch,
                host_slice_pitch,
                ptr,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_WRITE_BUFFER_RECT_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7181 
    /*! \brief Enqueue a device-side rectangular (2D/3D) copy between buffers.
     *
     *  No blocking flag: this runs asynchronously; order via wait list/event.
     *  (__ENQEUE_COPY_BUFFER_RECT_ERR keeps its historical misspelling.)
     */
    cl_int enqueueCopyBufferRect(
        const Buffer& src,
        const Buffer& dst,
        const array<size_type, 3>& src_origin,
        const array<size_type, 3>& dst_origin,
        const array<size_type, 3>& region,
        size_type src_row_pitch,
        size_type src_slice_pitch,
        size_type dst_row_pitch,
        size_type dst_slice_pitch,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueCopyBufferRect(
                object_,
                src(),
                dst(),
                src_origin.data(),
                dst_origin.data(),
                region.data(),
                src_row_pitch,
                src_slice_pitch,
                dst_row_pitch,
                dst_slice_pitch,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQEUE_COPY_BUFFER_RECT_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7218 
7219 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7220 
7231  template<typename PatternType>
7233  const Buffer& buffer,
7234  PatternType pattern,
7235  size_type offset,
7236  size_type size,
7237  const vector<Event>* events = NULL,
7238  Event* event = NULL) const
7239  {
7240  cl_event tmp;
7241  cl_int err = detail::errHandler(
7242  ::clEnqueueFillBuffer(
7243  object_,
7244  buffer(),
7245  static_cast<void*>(&pattern),
7246  sizeof(PatternType),
7247  offset,
7248  size,
7249  (events != NULL) ? (cl_uint) events->size() : 0,
7250  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7251  (event != NULL) ? &tmp : NULL),
7252  __ENQUEUE_FILL_BUFFER_ERR);
7253 
7254  if (event != NULL && err == CL_SUCCESS)
7255  *event = tmp;
7256 
7257  return err;
7258  }
7259 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7260 
    /*! \brief Enqueue a read from an image object into host memory.
     *
     *  \param origin,region (x, y, z) texel coordinates; unused dimensions
     *         must follow the OpenCL conventions (0 / 1 respectively).
     *  \param row_pitch,slice_pitch host layout; 0 lets the runtime infer.
     */
    cl_int enqueueReadImage(
        const Image& image,
        cl_bool blocking,
        const array<size_type, 3>& origin,
        const array<size_type, 3>& region,
        size_type row_pitch,
        size_type slice_pitch,
        void* ptr,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueReadImage(
                object_,
                image(),
                blocking,
                origin.data(),
                region.data(),
                row_pitch,
                slice_pitch,
                ptr,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_READ_IMAGE_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7293 
    /*! \brief Enqueue a write from host memory into an image object.
     *
     *  Mirror of enqueueReadImage with \a ptr as the host source.
     */
    cl_int enqueueWriteImage(
        const Image& image,
        cl_bool blocking,
        const array<size_type, 3>& origin,
        const array<size_type, 3>& region,
        size_type row_pitch,
        size_type slice_pitch,
        const void* ptr,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueWriteImage(
                object_,
                image(),
                blocking,
                origin.data(),
                region.data(),
                row_pitch,
                slice_pitch,
                ptr,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_WRITE_IMAGE_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7326 
    /*! \brief Enqueue a device-side copy between two image objects.
     *
     *  Runs asynchronously; order via the wait list or returned event.
     */
    cl_int enqueueCopyImage(
        const Image& src,
        const Image& dst,
        const array<size_type, 3>& src_origin,
        const array<size_type, 3>& dst_origin,
        const array<size_type, 3>& region,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueCopyImage(
                object_,
                src(),
                dst(),
                src_origin.data(),
                dst_origin.data(),
                region.data(),
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_COPY_IMAGE_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7355 
7356 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7357 
7365  const Image& image,
7366  cl_float4 fillColor,
7367  const array<size_type, 3>& origin,
7368  const array<size_type, 3>& region,
7369  const vector<Event>* events = NULL,
7370  Event* event = NULL) const
7371  {
7372  cl_event tmp;
7373  cl_int err = detail::errHandler(
7374  ::clEnqueueFillImage(
7375  object_,
7376  image(),
7377  static_cast<void*>(&fillColor),
7378  origin.data(),
7379  region.data(),
7380  (events != NULL) ? (cl_uint) events->size() : 0,
7381  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7382  (event != NULL) ? &tmp : NULL),
7383  __ENQUEUE_FILL_IMAGE_ERR);
7384 
7385  if (event != NULL && err == CL_SUCCESS)
7386  *event = tmp;
7387 
7388  return err;
7389  }
7390 
7399  const Image& image,
7400  cl_int4 fillColor,
7401  const array<size_type, 3>& origin,
7402  const array<size_type, 3>& region,
7403  const vector<Event>* events = NULL,
7404  Event* event = NULL) const
7405  {
7406  cl_event tmp;
7407  cl_int err = detail::errHandler(
7408  ::clEnqueueFillImage(
7409  object_,
7410  image(),
7411  static_cast<void*>(&fillColor),
7412  origin.data(),
7413  region.data(),
7414  (events != NULL) ? (cl_uint) events->size() : 0,
7415  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7416  (event != NULL) ? &tmp : NULL),
7417  __ENQUEUE_FILL_IMAGE_ERR);
7418 
7419  if (event != NULL && err == CL_SUCCESS)
7420  *event = tmp;
7421 
7422  return err;
7423  }
7424 
7433  const Image& image,
7434  cl_uint4 fillColor,
7435  const array<size_type, 3>& origin,
7436  const array<size_type, 3>& region,
7437  const vector<Event>* events = NULL,
7438  Event* event = NULL) const
7439  {
7440  cl_event tmp;
7441  cl_int err = detail::errHandler(
7442  ::clEnqueueFillImage(
7443  object_,
7444  image(),
7445  static_cast<void*>(&fillColor),
7446  origin.data(),
7447  region.data(),
7448  (events != NULL) ? (cl_uint) events->size() : 0,
7449  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7450  (event != NULL) ? &tmp : NULL),
7451  __ENQUEUE_FILL_IMAGE_ERR);
7452 
7453  if (event != NULL && err == CL_SUCCESS)
7454  *event = tmp;
7455 
7456  return err;
7457  }
7458 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7459 
    /*! \brief Enqueue a device-side copy from an image into a buffer.
     *
     *  \param dst_offset byte offset into \a dst at which writing starts.
     */
    cl_int enqueueCopyImageToBuffer(
        const Image& src,
        const Buffer& dst,
        const array<size_type, 3>& src_origin,
        const array<size_type, 3>& region,
        size_type dst_offset,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueCopyImageToBuffer(
                object_,
                src(),
                dst(),
                src_origin.data(),
                region.data(),
                dst_offset,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7488 
    /*! \brief Enqueue a device-side copy from a buffer into an image.
     *
     *  \param src_offset byte offset into \a src at which reading starts.
     */
    cl_int enqueueCopyBufferToImage(
        const Buffer& src,
        const Image& dst,
        size_type src_offset,
        const array<size_type, 3>& dst_origin,
        const array<size_type, 3>& region,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueCopyBufferToImage(
                object_,
                src(),
                dst(),
                src_offset,
                dst_origin.data(),
                region.data(),
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7517 
    /*! \brief Map a region of a buffer into host address space.
     *
     *  \param flags CL_MAP_READ / CL_MAP_WRITE (and friends) bitfield.
     *  \param err   optional out-parameter receiving the status code.
     *  \return host pointer to the mapped region (meaningful only when the
     *          reported status is CL_SUCCESS). Unmap with enqueueUnmapMemObject.
     */
    void* enqueueMapBuffer(
        const Buffer& buffer,
        cl_bool blocking,
        cl_map_flags flags,
        size_type offset,
        size_type size,
        const vector<Event>* events = NULL,
        Event* event = NULL,
        cl_int* err = NULL) const
    {
        cl_event tmp;
        cl_int error;
        void * result = ::clEnqueueMapBuffer(
            object_, buffer(), blocking, flags, offset, size,
            (events != NULL) ? (cl_uint) events->size() : 0,
            (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
            (event != NULL) ? &tmp : NULL,
            &error);

        // Route the status through the error handler before reporting it.
        detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
        if (err != NULL) {
            *err = error;
        }
        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && error == CL_SUCCESS)
            *event = tmp;

        return result;
    }
7546 
    /*! \brief Map a region of an image into host address space.
     *
     *  \param buffer the image to map (parameter name kept for source
     *         compatibility even though it is an Image).
     *  \param row_pitch,slice_pitch out-parameters receiving the layout of
     *         the mapped region.
     *  \return host pointer to the mapped region (valid only on CL_SUCCESS).
     */
    void* enqueueMapImage(
        const Image& buffer,
        cl_bool blocking,
        cl_map_flags flags,
        const array<size_type, 3>& origin,
        const array<size_type, 3>& region,
        size_type * row_pitch,
        size_type * slice_pitch,
        const vector<Event>* events = NULL,
        Event* event = NULL,
        cl_int* err = NULL) const
    {
        cl_event tmp;
        cl_int error;
        void * result = ::clEnqueueMapImage(
            object_, buffer(), blocking, flags,
            origin.data(),
            region.data(),
            row_pitch, slice_pitch,
            (events != NULL) ? (cl_uint) events->size() : 0,
            (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
            (event != NULL) ? &tmp : NULL,
            &error);

        // Route the status through the error handler before reporting it.
        detail::errHandler(error, __ENQUEUE_MAP_IMAGE_ERR);
        if (err != NULL) {
            *err = error;
        }
        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && error == CL_SUCCESS)
            *event = tmp;
        return result;
    }
7579 
7580 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7581 
7585  template<typename T>
7587  T* ptr,
7588  cl_bool blocking,
7589  cl_map_flags flags,
7590  size_type size,
7591  const vector<Event>* events = NULL,
7592  Event* event = NULL) const
7593  {
7594  cl_event tmp;
7595  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7596  object_, blocking, flags, static_cast<void*>(ptr), size,
7597  (events != NULL) ? (cl_uint)events->size() : 0,
7598  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7599  (event != NULL) ? &tmp : NULL),
7600  __ENQUEUE_MAP_BUFFER_ERR);
7601 
7602  if (event != NULL && err == CL_SUCCESS)
7603  *event = tmp;
7604 
7605  return err;
7606  }
7607 
7608 
7613  template<typename T, class D>
7615  cl::pointer<T, D> &ptr,
7616  cl_bool blocking,
7617  cl_map_flags flags,
7618  size_type size,
7619  const vector<Event>* events = NULL,
7620  Event* event = NULL) const
7621  {
7622  cl_event tmp;
7623  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7624  object_, blocking, flags, static_cast<void*>(ptr.get()), size,
7625  (events != NULL) ? (cl_uint)events->size() : 0,
7626  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7627  (event != NULL) ? &tmp : NULL),
7628  __ENQUEUE_MAP_BUFFER_ERR);
7629 
7630  if (event != NULL && err == CL_SUCCESS)
7631  *event = tmp;
7632 
7633  return err;
7634  }
7635 
7640  template<typename T, class Alloc>
7642  cl::vector<T, Alloc> &container,
7643  cl_bool blocking,
7644  cl_map_flags flags,
7645  const vector<Event>* events = NULL,
7646  Event* event = NULL) const
7647  {
7648  cl_event tmp;
7649  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7650  object_, blocking, flags, static_cast<void*>(container.data()), container.size(),
7651  (events != NULL) ? (cl_uint)events->size() : 0,
7652  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7653  (event != NULL) ? &tmp : NULL),
7654  __ENQUEUE_MAP_BUFFER_ERR);
7655 
7656  if (event != NULL && err == CL_SUCCESS)
7657  *event = tmp;
7658 
7659  return err;
7660  }
7661 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7662 
    /*! \brief Unmap a previously mapped memory object region.
     *
     *  \param memory     the Buffer/Image that was mapped.
     *  \param mapped_ptr the pointer returned by the matching enqueueMap* call.
     */
    cl_int enqueueUnmapMemObject(
        const Memory& memory,
        void* mapped_ptr,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueUnmapMemObject(
                object_, memory(), mapped_ptr,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_UNMAP_MEM_OBJECT_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7683 
7684 
7685 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7686 
7690  template<typename T>
7692  T* ptr,
7693  const vector<Event>* events = NULL,
7694  Event* event = NULL) const
7695  {
7696  cl_event tmp;
7697  cl_int err = detail::errHandler(
7698  ::clEnqueueSVMUnmap(
7699  object_, static_cast<void*>(ptr),
7700  (events != NULL) ? (cl_uint)events->size() : 0,
7701  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7702  (event != NULL) ? &tmp : NULL),
7703  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7704 
7705  if (event != NULL && err == CL_SUCCESS)
7706  *event = tmp;
7707 
7708  return err;
7709  }
7710 
7715  template<typename T, class D>
7717  cl::pointer<T, D> &ptr,
7718  const vector<Event>* events = NULL,
7719  Event* event = NULL) const
7720  {
7721  cl_event tmp;
7722  cl_int err = detail::errHandler(
7723  ::clEnqueueSVMUnmap(
7724  object_, static_cast<void*>(ptr.get()),
7725  (events != NULL) ? (cl_uint)events->size() : 0,
7726  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7727  (event != NULL) ? &tmp : NULL),
7728  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7729 
7730  if (event != NULL && err == CL_SUCCESS)
7731  *event = tmp;
7732 
7733  return err;
7734  }
7735 
7740  template<typename T, class Alloc>
7742  cl::vector<T, Alloc> &container,
7743  const vector<Event>* events = NULL,
7744  Event* event = NULL) const
7745  {
7746  cl_event tmp;
7747  cl_int err = detail::errHandler(
7748  ::clEnqueueSVMUnmap(
7749  object_, static_cast<void*>(container.data()),
7750  (events != NULL) ? (cl_uint)events->size() : 0,
7751  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7752  (event != NULL) ? &tmp : NULL),
7753  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7754 
7755  if (event != NULL && err == CL_SUCCESS)
7756  *event = tmp;
7757 
7758  return err;
7759  }
7760 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7761 
7762 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7763 
7775  const vector<Event> *events = 0,
7776  Event *event = 0)
7777  {
7778  cl_event tmp;
7779  cl_int err = detail::errHandler(
7780  ::clEnqueueMarkerWithWaitList(
7781  object_,
7782  (events != NULL) ? (cl_uint) events->size() : 0,
7783  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7784  (event != NULL) ? &tmp : NULL),
7785  __ENQUEUE_MARKER_WAIT_LIST_ERR);
7786 
7787  if (event != NULL && err == CL_SUCCESS)
7788  *event = tmp;
7789 
7790  return err;
7791  }
7792 
7805  const vector<Event> *events = 0,
7806  Event *event = 0)
7807  {
7808  cl_event tmp;
7809  cl_int err = detail::errHandler(
7810  ::clEnqueueBarrierWithWaitList(
7811  object_,
7812  (events != NULL) ? (cl_uint) events->size() : 0,
7813  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7814  (event != NULL) ? &tmp : NULL),
7815  __ENQUEUE_BARRIER_WAIT_LIST_ERR);
7816 
7817  if (event != NULL && err == CL_SUCCESS)
7818  *event = tmp;
7819 
7820  return err;
7821  }
7822 
7828  const vector<Memory> &memObjects,
7829  cl_mem_migration_flags flags,
7830  const vector<Event>* events = NULL,
7831  Event* event = NULL
7832  )
7833  {
7834  cl_event tmp;
7835 
7836  vector<cl_mem> localMemObjects(memObjects.size());
7837 
7838  for( int i = 0; i < (int)memObjects.size(); ++i ) {
7839  localMemObjects[i] = memObjects[i]();
7840  }
7841 
7842 
7843  cl_int err = detail::errHandler(
7844  ::clEnqueueMigrateMemObjects(
7845  object_,
7846  (cl_uint)memObjects.size(),
7847  localMemObjects.data(),
7848  flags,
7849  (events != NULL) ? (cl_uint) events->size() : 0,
7850  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7851  (event != NULL) ? &tmp : NULL),
7852  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7853 
7854  if (event != NULL && err == CL_SUCCESS)
7855  *event = tmp;
7856 
7857  return err;
7858  }
7859 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7860 
    /*! \brief Enqueue a kernel for execution over an N-dimensional range.
     *
     *  \param offset global work offset; NullRange maps to a NULL offset.
     *  \param global global work size; its dimensionality drives the call.
     *  \param local  work-group size; NullRange lets the runtime choose.
     */
    cl_int enqueueNDRangeKernel(
        const Kernel& kernel,
        const NDRange& offset,
        const NDRange& global,
        const NDRange& local = NullRange,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueNDRangeKernel(
                object_, kernel(), (cl_uint) global.dimensions(),
                offset.dimensions() != 0 ? (const size_type*) offset : NULL,
                (const size_type*) global,
                local.dimensions() != 0 ? (const size_type*) local : NULL,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_NDRANGE_KERNEL_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7886 
7887 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
    /*! \brief Enqueue a single-work-item kernel execution (deprecated).
     *
     *  clEnqueueTask was deprecated in OpenCL 2.0; prefer enqueueNDRangeKernel
     *  with a 1x1x1 range. Only compiled under CL_USE_DEPRECATED_OPENCL_1_2_APIS.
     */
    CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int enqueueTask(
        const Kernel& kernel,
        const vector<Event>* events = NULL,
        Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueTask(
                object_, kernel(),
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_TASK_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7907 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
7908 
    /*! \brief Enqueue a host-callable (native) function.
     *
     *  \param userFptr    host function invoked by the runtime.
     *  \param args        {pointer, size} block copied and passed to \a userFptr.
     *  \param mem_objects OpenCL memory objects referenced inside \a args.
     *  \param mem_locs    locations within \a args where each object's
     *         device pointer should be patched in.
     */
    cl_int enqueueNativeKernel(
        void (CL_CALLBACK *userFptr)(void *),
        std::pair<void*, size_type> args,
        const vector<Memory>* mem_objects = NULL,
        const vector<const void*>* mem_locs = NULL,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        size_type elements = 0;
        if (mem_objects != NULL) {
            elements = mem_objects->size();
        }
        // Unwrap the C++ Memory wrappers into raw cl_mem handles for the C API.
        vector<cl_mem> mems(elements);
        for (unsigned int i = 0; i < elements; i++) {
            mems[i] = ((*mem_objects)[i])();
        }

        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueNativeKernel(
                object_, userFptr, args.first, args.second,
                (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
                mems.data(),
                (mem_locs != NULL && mem_locs->size() > 0) ? (const void **) &mem_locs->front() : NULL,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_NATIVE_KERNEL);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7943 
7947 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
    /*! \brief Enqueue a marker event (deprecated OpenCL 1.1 API).
     *
     *  The marker completes when all previously enqueued commands complete.
     *  Deprecated in 1.2 in favor of clEnqueueMarkerWithWaitList.
     */
    CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
    cl_int enqueueMarker(Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueMarker(
                object_,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_MARKER_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7963 
    /*! \brief Enqueue a wait on a list of events (deprecated OpenCL 1.1 API).
     *
     *  Deprecated in 1.2 in favor of clEnqueueBarrierWithWaitList. Passing an
     *  empty vector yields a zero count and a NULL list.
     */
    CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
    cl_int enqueueWaitForEvents(const vector<Event>& events) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
    {
        return detail::errHandler(
            ::clEnqueueWaitForEvents(
                object_,
                (cl_uint) events.size(),
                events.size() > 0 ? (const cl_event*) &events.front() : NULL),
            __ENQUEUE_WAIT_FOR_EVENTS_ERR);
    }
7974 #endif // defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
7975 
    /*! \brief Acquire OpenCL memory objects created from OpenGL objects.
     *
     *  The GL objects must not be in use by GL when the acquire executes.
     */
    cl_int enqueueAcquireGLObjects(
        const vector<Memory>* mem_objects = NULL,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueAcquireGLObjects(
                object_,
                (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
                (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_ACQUIRE_GL_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
7997 
    /*! \brief Release previously acquired GL-shared memory objects back to GL.
     */
    cl_int enqueueReleaseGLObjects(
        const vector<Memory>* mem_objects = NULL,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        cl_event tmp;
        cl_int err = detail::errHandler(
            ::clEnqueueReleaseGLObjects(
                object_,
                (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
                (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_RELEASE_GL_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
8019 
8020 #if defined (CL_HPP_USE_DX_INTEROP)
8021 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueAcquireD3D10ObjectsKHR)(
8022  cl_command_queue command_queue, cl_uint num_objects,
8023  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8024  const cl_event* event_wait_list, cl_event* event);
8025 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueReleaseD3D10ObjectsKHR)(
8026  cl_command_queue command_queue, cl_uint num_objects,
8027  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8028  const cl_event* event_wait_list, cl_event* event);
8029 
8030  cl_int enqueueAcquireD3D10Objects(
8031  const vector<Memory>* mem_objects = NULL,
8032  const vector<Event>* events = NULL,
8033  Event* event = NULL) const
8034  {
8035  static PFN_clEnqueueAcquireD3D10ObjectsKHR pfn_clEnqueueAcquireD3D10ObjectsKHR = NULL;
8036 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8037  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8038  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8039  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8040  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueAcquireD3D10ObjectsKHR);
8041 #endif
8042 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8043  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueAcquireD3D10ObjectsKHR);
8044 #endif
8045 
8046  cl_event tmp;
8047  cl_int err = detail::errHandler(
8048  pfn_clEnqueueAcquireD3D10ObjectsKHR(
8049  object_,
8050  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8051  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8052  (events != NULL) ? (cl_uint) events->size() : 0,
8053  (events != NULL) ? (cl_event*) &events->front() : NULL,
8054  (event != NULL) ? &tmp : NULL),
8055  __ENQUEUE_ACQUIRE_GL_ERR);
8056 
8057  if (event != NULL && err == CL_SUCCESS)
8058  *event = tmp;
8059 
8060  return err;
8061  }
8062 
    /*! \brief Release previously acquired D3D10-shared objects back to D3D10.
     *
     *  Resolves clEnqueueReleaseD3D10ObjectsKHR lazily via the extension
     *  function-pointer macros. Reuses the GL error string, as upstream does.
     */
    cl_int enqueueReleaseD3D10Objects(
        const vector<Memory>* mem_objects = NULL,
        const vector<Event>* events = NULL,
        Event* event = NULL) const
    {
        static PFN_clEnqueueReleaseD3D10ObjectsKHR pfn_clEnqueueReleaseD3D10ObjectsKHR = NULL;
#if CL_HPP_TARGET_OPENCL_VERSION >= 120
        // NOTE(review): 'context' appears unused (the macro takes the platform);
        // kept as-is since getInfo may report errors through the error handler.
        cl_context context = getInfo<CL_QUEUE_CONTEXT>();
        cl::Device device(getInfo<CL_QUEUE_DEVICE>());
        cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
        CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueReleaseD3D10ObjectsKHR);
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
#if CL_HPP_TARGET_OPENCL_VERSION >= 110
        CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueReleaseD3D10ObjectsKHR);
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 110

        cl_event tmp;
        cl_int err = detail::errHandler(
            pfn_clEnqueueReleaseD3D10ObjectsKHR(
                object_,
                (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
                (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
                (events != NULL) ? (cl_uint) events->size() : 0,
                (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
                (event != NULL) ? &tmp : NULL),
            __ENQUEUE_RELEASE_GL_ERR);

        // Publish the event only on success: tmp is uninitialized otherwise.
        if (event != NULL && err == CL_SUCCESS)
            *event = tmp;

        return err;
    }
8095 #endif
8096 
8100 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
    /*! \brief Enqueue a barrier (deprecated OpenCL 1.1 API).
     *
     *  Deprecated in 1.2 in favor of clEnqueueBarrierWithWaitList.
     */
    CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
    cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
    {
        return detail::errHandler(
            ::clEnqueueBarrier(object_),
            __ENQUEUE_BARRIER_ERR);
    }
8108 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
8109 
8110  cl_int flush() const
8111  {
8112  return detail::errHandler(::clFlush(object_), __FLUSH_ERR);
8113  }
8114 
8115  cl_int finish() const
8116  {
8117  return detail::errHandler(::clFinish(object_), __FINISH_ERR);
8118  }
8119 }; // CommandQueue
8120 
// Storage for CommandQueue's lazily-initialized default queue. The
// CL_HPP_DEFINE_STATIC_MEMBER_ macro (defined earlier in this header)
// presumably supplies linkage suitable for a header-only library - confirm
// against its definition.
CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag CommandQueue::default_initialized_;
CL_HPP_DEFINE_STATIC_MEMBER_ CommandQueue CommandQueue::default_;
CL_HPP_DEFINE_STATIC_MEMBER_ cl_int CommandQueue::default_error_ = CL_SUCCESS;
8124 
8125 
8126 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
/*! \brief Type-safe properties for device-side command queues.
 *
 *  Scoped wrapper over cl_command_queue_properties bits; values can be
 *  combined with the operator| overload defined below.
 */
enum class DeviceQueueProperties : cl_command_queue_properties
{
    None = 0,
    Profiling = CL_QUEUE_PROFILING_ENABLE,
};
8132 
8133 inline DeviceQueueProperties operator|(DeviceQueueProperties lhs, DeviceQueueProperties rhs)
8134 {
8135  return static_cast<DeviceQueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
8136 }
8137 
/*! \brief Wrapper for an OpenCL 2.0 device-side command queue
 *  (a cl_command_queue created with CL_QUEUE_ON_DEVICE).
 *
 *  All constructors force CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE and
 *  CL_QUEUE_ON_DEVICE in addition to any user-requested properties.
 *
 *  NOTE(review): several signature/statement lines in this class appear to
 *  have been stripped by the extraction (marked inline below) — confirm
 *  against upstream cl2.hpp before relying on the exact text.
 */
class DeviceCommandQueue : public detail::Wrapper<cl_command_queue>
{
public:

    /*! \brief Construct a device command queue on the default context and
     *  device, with the given extra properties.
     *  \param properties extra queue properties (e.g. Profiling).
     *  \param err if non-NULL, receives the OpenCL error code.
     */
    DeviceCommandQueue(DeviceQueueProperties properties, cl_int* err = NULL)
    {
        cl_int error;
        // NOTE(review): lines obtaining the default Context/Device appear to
        // be missing from this extraction; 'context'/'device' used below are
        // presumably the defaults — confirm against upstream cl2.hpp.

        // Device queues are always out-of-order and on-device.
        cl_command_queue_properties mergedProperties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);

        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, mergedProperties, 0 };
        object_ = ::clCreateCommandQueueWithProperties(
            context(), device(), queue_properties, &error);

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }
    }

    /*! \brief Construct a device command queue for an explicit context and
     *  device.
     *  NOTE(review): the constructor's name line appears missing from this
     *  extraction.
     */
        const Context& context,
        const Device& device,
        DeviceQueueProperties properties = DeviceQueueProperties::None,
        cl_int* err = NULL)
    {
        cl_int error;

        // Device queues are always out-of-order and on-device.
        cl_command_queue_properties mergedProperties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, mergedProperties, 0 };
        object_ = ::clCreateCommandQueueWithProperties(
            context(), device(), queue_properties, &error);

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }
    }

    /*! \brief Construct a device command queue with an explicit queue size.
     *  \param queueSize requested size of the device queue, in bytes
     *  (passed through as CL_QUEUE_SIZE).
     *  NOTE(review): the constructor's name line appears missing from this
     *  extraction.
     */
        const Context& context,
        const Device& device,
        cl_uint queueSize,
        DeviceQueueProperties properties = DeviceQueueProperties::None,
        cl_int* err = NULL)
    {
        cl_int error;

        // Device queues are always out-of-order and on-device.
        cl_command_queue_properties mergedProperties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, mergedProperties,
            CL_QUEUE_SIZE, queueSize,
            0 };
        object_ = ::clCreateCommandQueueWithProperties(
            context(), device(), queue_properties, &error);

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }
    }

    /*! \brief Wrap an existing cl_command_queue handle.
     *  \param retainObject if true, calls clRetainCommandQueue on the handle;
     *  pass false when transferring ownership of an already-retained handle.
     */
    explicit DeviceCommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
        detail::Wrapper<cl_type>(commandQueue, retainObject) { }

    // Assignment from a raw handle.
    // NOTE(review): the line forwarding to Wrapper::operator=(rhs) appears
    // missing from this extraction.
    DeviceCommandQueue& operator = (const cl_command_queue& rhs)
    {
        return *this;
    }

    /*! \brief Copy constructor: shares the underlying queue (Wrapper
     *  performs the retain).
     */
    DeviceCommandQueue(const DeviceCommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}

    // Copy assignment.
    // NOTE(review): the signature line and the forwarding statement appear
    // missing from this extraction.
    {
        return *this;
    }

    /*! \brief Move constructor: takes over the handle without a retain. */
    DeviceCommandQueue(DeviceCommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}

    // Move assignment.
    // NOTE(review): the signature line appears missing from this extraction.
    {
        detail::Wrapper<cl_type>::operator=(std::move(queue));
        return *this;
    }

    //! \brief Query information about the queue via clGetCommandQueueInfo.
    template <typename T>
    cl_int getInfo(cl_command_queue_info name, T* param) const
    {
        return detail::errHandler(
            detail::getInfo(
                &::clGetCommandQueueInfo, object_, name, param),
            __GET_COMMAND_QUEUE_INFO_ERR);
    }

    //! \brief Typed convenience overload: returns the value directly and
    //! reports the error through \p err.
    template <cl_int name> typename
    detail::param_traits<detail::cl_command_queue_info, name>::param_type
    getInfo(cl_int* err = NULL) const
    {
        typename detail::param_traits<
            detail::cl_command_queue_info, name>::param_type param;
        cl_int result = getInfo(name, &param);
        if (err != NULL) {
            *err = result;
        }
        return param;
    }

    /*! \brief Create the default device command queue on the default
     *  context/device.
     *  NOTE(review): the 'static DeviceCommandQueue makeDefault(' signature
     *  line and the default Context/Device acquisition lines appear missing
     *  from this extraction.
     */
        cl_int *err = nullptr)
    {
        cl_int error;

        // The default device queue additionally carries
        // CL_QUEUE_ON_DEVICE_DEFAULT.
        cl_command_queue_properties properties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, properties,
            0 };
        DeviceCommandQueue deviceQueue(
            ::clCreateCommandQueueWithProperties(
                context(), device(), queue_properties, &error));

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }

        return deviceQueue;
    }

    /*! \brief Create the default device command queue for an explicit
     *  context and device.
     *  NOTE(review): the 'static DeviceCommandQueue makeDefault(' signature
     *  line appears missing from this extraction.
     */
        const Context &context, const Device &device, cl_int *err = nullptr)
    {
        cl_int error;

        cl_command_queue_properties properties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, properties,
            0 };
        DeviceCommandQueue deviceQueue(
            ::clCreateCommandQueueWithProperties(
                context(), device(), queue_properties, &error));

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }

        return deviceQueue;
    }

    /*! \brief Create the default device command queue with an explicit
     *  queue size (CL_QUEUE_SIZE).
     *  NOTE(review): the 'static DeviceCommandQueue makeDefault(' signature
     *  line appears missing from this extraction.
     */
        const Context &context, const Device &device, cl_uint queueSize, cl_int *err = nullptr)
    {
        cl_int error;

        cl_command_queue_properties properties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, properties,
            CL_QUEUE_SIZE, queueSize,
            0 };
        DeviceCommandQueue deviceQueue(
            ::clCreateCommandQueueWithProperties(
                context(), device(), queue_properties, &error));

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }

        return deviceQueue;
    }
}; // DeviceCommandQueue
8376 
namespace detail
{
    // Specialization for device command queue
    // Tells the kernel-argument marshalling machinery how to pass a
    // cl::DeviceCommandQueue to clSetKernelArg: as a pointer to the raw
    // cl_command_queue handle, with the handle's size.
    // NOTE(review): the specialized struct's declaration line appears to be
    // missing from this extraction — confirm against upstream cl2.hpp.
    template <>
    {
        static size_type size(const cl::DeviceCommandQueue&) { return sizeof(cl_command_queue); }
        static const cl_command_queue* ptr(const cl::DeviceCommandQueue& value) { return &(value()); }
    };
} // namespace detail
8387 
8388 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8389 
8390 
template< typename IteratorType >
// Constructs a Buffer sized to hold [startIterator, endIterator) in the
// given context.
// NOTE(review): the definition's name line (presumably 'Buffer::Buffer(')
// appears to be missing from this extraction — confirm against upstream.
//
// Flags: CL_MEM_READ_ONLY or CL_MEM_READ_WRITE per readOnly; with
// useHostPtr the buffer aliases the iterator storage via
// CL_MEM_USE_HOST_PTR (the range must be contiguous and outlive the
// buffer); otherwise the data is copied in through a temporary queue.
// err, if non-NULL, receives the last OpenCL error code.
    const Context &context,
    IteratorType startIterator,
    IteratorType endIterator,
    bool readOnly,
    bool useHostPtr,
    cl_int* err)
{
    typedef typename std::iterator_traits<IteratorType>::value_type DataType;
    cl_int error;

    cl_mem_flags flags = 0;
    if( readOnly ) {
        flags |= CL_MEM_READ_ONLY;
    }
    else {
        flags |= CL_MEM_READ_WRITE;
    }
    if( useHostPtr ) {
        flags |= CL_MEM_USE_HOST_PTR;
    }

    // Total size in bytes of the iterator range.
    size_type size = sizeof(DataType)*(endIterator - startIterator);

    if( useHostPtr ) {
        // Host-pointer path: hand the range's storage to the runtime.
        object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
    } else {
        object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
    }

    detail::errHandler(error, __CREATE_BUFFER_ERR);
    if (err != NULL) {
        *err = error;
    }

    if( !useHostPtr ) {
        // No host pointer: copy the data in via a temporary queue on the
        // context's first device.
        CommandQueue queue(context, 0, &error);
        detail::errHandler(error, __CREATE_BUFFER_ERR);
        if (err != NULL) {
            *err = error;
        }

        error = cl::copy(queue, startIterator, endIterator, *this);
        detail::errHandler(error, __CREATE_BUFFER_ERR);
        if (err != NULL) {
            *err = error;
        }
    }
}
8441 
template< typename IteratorType >
// Constructs a Buffer sized to hold [startIterator, endIterator), using an
// existing command queue (the context is queried from the queue).
// NOTE(review): the definition's name line (presumably 'Buffer::Buffer(')
// appears to be missing from this extraction — confirm against upstream.
//
// Same flag handling as the context-based overload; without useHostPtr the
// data is copied in using the supplied queue instead of a temporary one.
    const CommandQueue &queue,
    IteratorType startIterator,
    IteratorType endIterator,
    bool readOnly,
    bool useHostPtr,
    cl_int* err)
{
    typedef typename std::iterator_traits<IteratorType>::value_type DataType;
    cl_int error;

    cl_mem_flags flags = 0;
    if (readOnly) {
        flags |= CL_MEM_READ_ONLY;
    }
    else {
        flags |= CL_MEM_READ_WRITE;
    }
    if (useHostPtr) {
        flags |= CL_MEM_USE_HOST_PTR;
    }

    // Total size in bytes of the iterator range.
    size_type size = sizeof(DataType)*(endIterator - startIterator);

    // The buffer lives in the queue's context.
    Context context = queue.getInfo<CL_QUEUE_CONTEXT>();

    if (useHostPtr) {
        // Host-pointer path: hand the range's storage to the runtime.
        object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
    }
    else {
        object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
    }

    detail::errHandler(error, __CREATE_BUFFER_ERR);
    if (err != NULL) {
        *err = error;
    }

    if (!useHostPtr) {
        // No host pointer: copy the data in on the caller's queue.
        error = cl::copy(queue, startIterator, endIterator, *this);
        detail::errHandler(error, __CREATE_BUFFER_ERR);
        if (err != NULL) {
            *err = error;
        }
    }
}
8489 
8490 inline cl_int enqueueReadBuffer(
8491  const Buffer& buffer,
8492  cl_bool blocking,
8493  size_type offset,
8494  size_type size,
8495  void* ptr,
8496  const vector<Event>* events = NULL,
8497  Event* event = NULL)
8498 {
8499  cl_int error;
8500  CommandQueue queue = CommandQueue::getDefault(&error);
8501 
8502  if (error != CL_SUCCESS) {
8503  return error;
8504  }
8505 
8506  return queue.enqueueReadBuffer(buffer, blocking, offset, size, ptr, events, event);
8507 }
8508 
8509 inline cl_int enqueueWriteBuffer(
8510  const Buffer& buffer,
8511  cl_bool blocking,
8512  size_type offset,
8513  size_type size,
8514  const void* ptr,
8515  const vector<Event>* events = NULL,
8516  Event* event = NULL)
8517 {
8518  cl_int error;
8519  CommandQueue queue = CommandQueue::getDefault(&error);
8520 
8521  if (error != CL_SUCCESS) {
8522  return error;
8523  }
8524 
8525  return queue.enqueueWriteBuffer(buffer, blocking, offset, size, ptr, events, event);
8526 }
8527 
8528 inline void* enqueueMapBuffer(
8529  const Buffer& buffer,
8530  cl_bool blocking,
8531  cl_map_flags flags,
8532  size_type offset,
8533  size_type size,
8534  const vector<Event>* events = NULL,
8535  Event* event = NULL,
8536  cl_int* err = NULL)
8537 {
8538  cl_int error;
8539  CommandQueue queue = CommandQueue::getDefault(&error);
8540  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8541  if (err != NULL) {
8542  *err = error;
8543  }
8544 
8545  void * result = ::clEnqueueMapBuffer(
8546  queue(), buffer(), blocking, flags, offset, size,
8547  (events != NULL) ? (cl_uint) events->size() : 0,
8548  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8549  (cl_event*) event,
8550  &error);
8551 
8552  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8553  if (err != NULL) {
8554  *err = error;
8555  }
8556  return result;
8557 }
8558 
8559 
8560 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8561 
8566 template<typename T>
8567 inline cl_int enqueueMapSVM(
8568  T* ptr,
8569  cl_bool blocking,
8570  cl_map_flags flags,
8571  size_type size,
8572  const vector<Event>* events,
8573  Event* event)
8574 {
8575  cl_int error;
8576  CommandQueue queue = CommandQueue::getDefault(&error);
8577  if (error != CL_SUCCESS) {
8578  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8579  }
8580 
8581  return queue.enqueueMapSVM(
8582  ptr, blocking, flags, size, events, event);
8583 }
8584 
8590 template<typename T, class D>
8591 inline cl_int enqueueMapSVM(
8592  cl::pointer<T, D> ptr,
8593  cl_bool blocking,
8594  cl_map_flags flags,
8595  size_type size,
8596  const vector<Event>* events = NULL,
8597  Event* event = NULL)
8598 {
8599  cl_int error;
8600  CommandQueue queue = CommandQueue::getDefault(&error);
8601  if (error != CL_SUCCESS) {
8602  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8603  }
8604 
8605  return queue.enqueueMapSVM(
8606  ptr, blocking, flags, size, events, event);
8607 }
8608 
8614 template<typename T, class Alloc>
8615 inline cl_int enqueueMapSVM(
8616  cl::vector<T, Alloc> container,
8617  cl_bool blocking,
8618  cl_map_flags flags,
8619  const vector<Event>* events = NULL,
8620  Event* event = NULL)
8621 {
8622  cl_int error;
8623  CommandQueue queue = CommandQueue::getDefault(&error);
8624  if (error != CL_SUCCESS) {
8625  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8626  }
8627 
8628  return queue.enqueueMapSVM(
8629  container, blocking, flags, events, event);
8630 }
8631 
8632 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8633 
8634 inline cl_int enqueueUnmapMemObject(
8635  const Memory& memory,
8636  void* mapped_ptr,
8637  const vector<Event>* events = NULL,
8638  Event* event = NULL)
8639 {
8640  cl_int error;
8641  CommandQueue queue = CommandQueue::getDefault(&error);
8642  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8643  if (error != CL_SUCCESS) {
8644  return error;
8645  }
8646 
8647  cl_event tmp;
8648  cl_int err = detail::errHandler(
8649  ::clEnqueueUnmapMemObject(
8650  queue(), memory(), mapped_ptr,
8651  (events != NULL) ? (cl_uint)events->size() : 0,
8652  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8653  (event != NULL) ? &tmp : NULL),
8654  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8655 
8656  if (event != NULL && err == CL_SUCCESS)
8657  *event = tmp;
8658 
8659  return err;
8660 }
8661 
8662 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8663 
8668 template<typename T>
8669 inline cl_int enqueueUnmapSVM(
8670  T* ptr,
8671  const vector<Event>* events = NULL,
8672  Event* event = NULL)
8673 {
8674  cl_int error;
8675  CommandQueue queue = CommandQueue::getDefault(&error);
8676  if (error != CL_SUCCESS) {
8677  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8678  }
8679 
8680  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
8681  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8682 
8683 }
8684 
8690 template<typename T, class D>
8691 inline cl_int enqueueUnmapSVM(
8692  cl::pointer<T, D> &ptr,
8693  const vector<Event>* events = NULL,
8694  Event* event = NULL)
8695 {
8696  cl_int error;
8697  CommandQueue queue = CommandQueue::getDefault(&error);
8698  if (error != CL_SUCCESS) {
8699  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8700  }
8701 
8702  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
8703  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8704 }
8705 
8711 template<typename T, class Alloc>
8712 inline cl_int enqueueUnmapSVM(
8713  cl::vector<T, Alloc> &container,
8714  const vector<Event>* events = NULL,
8715  Event* event = NULL)
8716 {
8717  cl_int error;
8718  CommandQueue queue = CommandQueue::getDefault(&error);
8719  if (error != CL_SUCCESS) {
8720  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8721  }
8722 
8723  return detail::errHandler(queue.enqueueUnmapSVM(container, events, event),
8724  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8725 }
8726 
8727 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8728 
8729 inline cl_int enqueueCopyBuffer(
8730  const Buffer& src,
8731  const Buffer& dst,
8732  size_type src_offset,
8733  size_type dst_offset,
8734  size_type size,
8735  const vector<Event>* events = NULL,
8736  Event* event = NULL)
8737 {
8738  cl_int error;
8739  CommandQueue queue = CommandQueue::getDefault(&error);
8740 
8741  if (error != CL_SUCCESS) {
8742  return error;
8743  }
8744 
8745  return queue.enqueueCopyBuffer(src, dst, src_offset, dst_offset, size, events, event);
8746 }
8747 
8753 template< typename IteratorType >
8754 inline cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
8755 {
8756  cl_int error;
8757  CommandQueue queue = CommandQueue::getDefault(&error);
8758  if (error != CL_SUCCESS)
8759  return error;
8760 
8761  return cl::copy(queue, startIterator, endIterator, buffer);
8762 }
8763 
8769 template< typename IteratorType >
8770 inline cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
8771 {
8772  cl_int error;
8773  CommandQueue queue = CommandQueue::getDefault(&error);
8774  if (error != CL_SUCCESS)
8775  return error;
8776 
8777  return cl::copy(queue, buffer, startIterator, endIterator);
8778 }
8779 
8785 template< typename IteratorType >
8786 inline cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
8787 {
8788  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8789  cl_int error;
8790 
8791  size_type length = endIterator-startIterator;
8792  size_type byteLength = length*sizeof(DataType);
8793 
8794  DataType *pointer =
8795  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, byteLength, 0, 0, &error));
8796  // if exceptions enabled, enqueueMapBuffer will throw
8797  if( error != CL_SUCCESS ) {
8798  return error;
8799  }
8800 #if defined(_MSC_VER)
8801  std::copy(
8802  startIterator,
8803  endIterator,
8804  stdext::checked_array_iterator<DataType*>(
8805  pointer, length));
8806 #else
8807  std::copy(startIterator, endIterator, pointer);
8808 #endif
8809  Event endEvent;
8810  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
8811  // if exceptions enabled, enqueueUnmapMemObject will throw
8812  if( error != CL_SUCCESS ) {
8813  return error;
8814  }
8815  endEvent.wait();
8816  return CL_SUCCESS;
8817 }
8818 
8824 template< typename IteratorType >
8825 inline cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
8826 {
8827  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8828  cl_int error;
8829 
8830  size_type length = endIterator-startIterator;
8831  size_type byteLength = length*sizeof(DataType);
8832 
8833  DataType *pointer =
8834  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_READ, 0, byteLength, 0, 0, &error));
8835  // if exceptions enabled, enqueueMapBuffer will throw
8836  if( error != CL_SUCCESS ) {
8837  return error;
8838  }
8839  std::copy(pointer, pointer + length, startIterator);
8840  Event endEvent;
8841  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
8842  // if exceptions enabled, enqueueUnmapMemObject will throw
8843  if( error != CL_SUCCESS ) {
8844  return error;
8845  }
8846  endEvent.wait();
8847  return CL_SUCCESS;
8848 }
8849 
8850 
8851 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8852 
8855 template<typename T, class Alloc>
8856 inline cl_int mapSVM(cl::vector<T, Alloc> &container)
8857 {
8858  return enqueueMapSVM(container, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
8859 }
8860 
8864 template<typename T, class Alloc>
8865 inline cl_int unmapSVM(cl::vector<T, Alloc> &container)
8866 {
8867  return enqueueUnmapSVM(container);
8868 }
8869 
8870 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8871 
8872 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8873 inline cl_int enqueueReadBufferRect(
8874  const Buffer& buffer,
8875  cl_bool blocking,
8876  const array<size_type, 3>& buffer_offset,
8877  const array<size_type, 3>& host_offset,
8878  const array<size_type, 3>& region,
8879  size_type buffer_row_pitch,
8880  size_type buffer_slice_pitch,
8881  size_type host_row_pitch,
8882  size_type host_slice_pitch,
8883  void *ptr,
8884  const vector<Event>* events = NULL,
8885  Event* event = NULL)
8886 {
8887  cl_int error;
8888  CommandQueue queue = CommandQueue::getDefault(&error);
8889 
8890  if (error != CL_SUCCESS) {
8891  return error;
8892  }
8893 
8894  return queue.enqueueReadBufferRect(
8895  buffer,
8896  blocking,
8897  buffer_offset,
8898  host_offset,
8899  region,
8900  buffer_row_pitch,
8901  buffer_slice_pitch,
8902  host_row_pitch,
8903  host_slice_pitch,
8904  ptr,
8905  events,
8906  event);
8907 }
8908 
8909 inline cl_int enqueueWriteBufferRect(
8910  const Buffer& buffer,
8911  cl_bool blocking,
8912  const array<size_type, 3>& buffer_offset,
8913  const array<size_type, 3>& host_offset,
8914  const array<size_type, 3>& region,
8915  size_type buffer_row_pitch,
8916  size_type buffer_slice_pitch,
8917  size_type host_row_pitch,
8918  size_type host_slice_pitch,
8919  const void *ptr,
8920  const vector<Event>* events = NULL,
8921  Event* event = NULL)
8922 {
8923  cl_int error;
8924  CommandQueue queue = CommandQueue::getDefault(&error);
8925 
8926  if (error != CL_SUCCESS) {
8927  return error;
8928  }
8929 
8930  return queue.enqueueWriteBufferRect(
8931  buffer,
8932  blocking,
8933  buffer_offset,
8934  host_offset,
8935  region,
8936  buffer_row_pitch,
8937  buffer_slice_pitch,
8938  host_row_pitch,
8939  host_slice_pitch,
8940  ptr,
8941  events,
8942  event);
8943 }
8944 
8945 inline cl_int enqueueCopyBufferRect(
8946  const Buffer& src,
8947  const Buffer& dst,
8948  const array<size_type, 3>& src_origin,
8949  const array<size_type, 3>& dst_origin,
8950  const array<size_type, 3>& region,
8951  size_type src_row_pitch,
8952  size_type src_slice_pitch,
8953  size_type dst_row_pitch,
8954  size_type dst_slice_pitch,
8955  const vector<Event>* events = NULL,
8956  Event* event = NULL)
8957 {
8958  cl_int error;
8959  CommandQueue queue = CommandQueue::getDefault(&error);
8960 
8961  if (error != CL_SUCCESS) {
8962  return error;
8963  }
8964 
8965  return queue.enqueueCopyBufferRect(
8966  src,
8967  dst,
8968  src_origin,
8969  dst_origin,
8970  region,
8971  src_row_pitch,
8972  src_slice_pitch,
8973  dst_row_pitch,
8974  dst_slice_pitch,
8975  events,
8976  event);
8977 }
8978 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
8979 
8980 inline cl_int enqueueReadImage(
8981  const Image& image,
8982  cl_bool blocking,
8983  const array<size_type, 3>& origin,
8984  const array<size_type, 3>& region,
8985  size_type row_pitch,
8986  size_type slice_pitch,
8987  void* ptr,
8988  const vector<Event>* events = NULL,
8989  Event* event = NULL)
8990 {
8991  cl_int error;
8992  CommandQueue queue = CommandQueue::getDefault(&error);
8993 
8994  if (error != CL_SUCCESS) {
8995  return error;
8996  }
8997