I created a custom CCSprite and added a PNG image to it:
CCTexture2D *paddleTexture = [[CCTextureCache sharedTextureCache] addImage:@"1.png"];
MySprite *mSprite = [MySprite paddleWithTexture:paddleTexture];

When a finger touches this CCSprite, how do I determine whether the touch location falls on a transparent area of the image or on the actual image content? Any help from the experts would be appreciated, thanks.

Note: this is what I currently use to test whether the touch point is on the CCSprite at all:

// Check whether the touch point is inside the CCSprite's bounds
- (BOOL)containsTouchLocation:(UITouch *)touch
{
    CGPoint p = [self convertTouchToNodeSpaceAR:touch];
    CGRect r = [self rectInPixels];
    return CGRectContainsPoint(r, p);
}

==== Solution ================================

Keep a CGImageRef instance variable for the image:

CGImageRef inImage;

Call getPixelColorAtLocation: with a point on the image and inspect the return value: it is the pixel's alpha value, so a return value of 0 means the point lies in a transparent area.

// Returns the alpha value (0-255) of the pixel at `point`, or -1 on error.
// A return value of 0 means the point is in a fully transparent area.
- (int)getPixelColorAtLocation:(CGPoint)point
{
    CGContextRef cgctx = [self createARGBBitmapContextFromImage];
    if (cgctx == NULL) {
        return -1; /* error */
    }

    size_t w = CGImageGetWidth(inImage);
    size_t h = CGImageGetHeight(inImage);
    CGRect rect = {{0, 0}, {w, h}};

    // Draw the image into the ARGB bitmap context so the raw pixel data can be read back.
    CGContextDrawImage(cgctx, rect, inImage);

    unsigned char *data = CGBitmapContextGetData(cgctx);
    int alpha = -1;
    if (data != NULL) {
        // ARGB layout: the alpha component is the first byte of each 4-byte pixel.
        int offset = 4 * ((w * round(point.y)) + round(point.x));
        alpha = data[offset];
    }

    CGContextRelease(cgctx);
    if (data) {
        free(data);
    }
    return alpha;
}

// Creates an ARGB bitmap context matching the size of inImage.
// The caller releases the context and frees its backing buffer.
- (CGContextRef)createARGBBitmapContextFromImage
{
    CGContextRef context = NULL;
    CGColorSpaceRef colorSpace;
    void *bitmapData;
    int bitmapByteCount;
    int bitmapBytesPerRow;

    size_t pixelsWide = CGImageGetWidth(inImage);
    size_t pixelsHigh = CGImageGetHeight(inImage);
    bitmapBytesPerRow = (pixelsWide * 4);
    bitmapByteCount = (bitmapBytesPerRow * pixelsHigh);

    colorSpace = CGColorSpaceCreateDeviceRGB();
    if (colorSpace == NULL) {
        return NULL;
    }

    bitmapData = malloc(bitmapByteCount);
    if (bitmapData == NULL) {
        CGColorSpaceRelease(colorSpace);
        return NULL;
    }

    context = CGBitmapContextCreate(bitmapData,
                                    pixelsWide,
                                    pixelsHigh,
                                    8,                  // bits per component
                                    bitmapBytesPerRow,
                                    colorSpace,
                                    kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);

    if (context == NULL) {
        free(bitmapData);
        fprintf(stderr, "Context not created!");
        return NULL;
    }

    CGContextSetBlendMode(context, kCGBlendModeCopy);
    return context;
}
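For completeness, here is a rough sketch (not part of the original answer) of how the two pieces might be tied together in a cocos2d targeted touch handler on MySprite. It assumes the inImage ivar already holds the CGImage of the same "1.png" (for example obtained from [UIImage imageNamed:@"1.png"].CGImage and retained), that the sprite is registered as a targeted touch delegate, and that the sprite is unscaled, unrotated, and uses a 1x texture; the y-flip is needed because node space has its origin at the bottom-left while CGImage pixel rows start at the top.

// Illustrative sketch only: combine the bounding-box test with the
// per-pixel alpha test inside a targeted touch handler.
- (BOOL)ccTouchBegan:(UITouch *)touch withEvent:(UIEvent *)event
{
    if (![self containsTouchLocation:touch]) {
        return NO;                              // outside the sprite's bounding box
    }

    // Node space: (0,0) is the sprite's bottom-left corner.
    CGPoint local = [self convertTouchToNodeSpace:touch];

    // CGImage pixel data starts at the top-left, so flip the y coordinate
    // before sampling the bitmap.
    CGPoint pixel = ccp(local.x, self.contentSize.height - local.y);

    int alpha = [self getPixelColorAtLocation:pixel];
    return (alpha > 0);                         // YES only when the pixel is not transparent
}

If the sprite is touched frequently, it would also be reasonable to draw the image into the bitmap context once and cache the pixel buffer, rather than redrawing it on every touch as getPixelColorAtLocation: does above.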