
I'm working on creating and storing OpenGL ES1 3D models, and I want to include image files to be used as textures within the same file as the 3D model data. I'm having trouble loading the image data back in a usable format. I use UIImageJPEGRepresentation to convert the image to JPEG-encoded NSData, append that to an NSMutableData object along with all the 3D data, and write it out to a file.

The data seems to write and read without error, but I run into problems when trying to use the image data to create the "CGImageRef" that I use to generate the texture for the 3D model. The image data seems to be in an unrecognized format after it is loaded from the file, because it generates the error "CGContextDrawImage: invalid context 0x0." when I attempt to create the "CGImageRef". I suspect the image data is getting misaligned somehow, causing it to be rejected during "CGImageRef" creation.

I'm stumped at this point and would appreciate any help. All of the data sizes and offsets add up and look fine, and saves and loads happen without error; the image data just seems to be off somehow, but I don't know why.
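
For reference, one quick sanity check on the reloaded data is to look for the JPEG start-of-image marker 0xFF 0xD8 at the front of the bytes. This is just a minimal diagnostic sketch (the looksLikeJPEG: helper is hypothetical, added only for testing, and is not part of my save/load code):

// Hypothetical diagnostic helper: a valid JPEG stream begins with
// the SOI marker 0xFF 0xD8 (and ends with the EOI marker 0xFF 0xD9).
- (BOOL)looksLikeJPEG:(NSData *)data {
  if ([data length] < 2) return NO;
  const unsigned char *bytes = [data bytes];
  return (bytes[0] == 0xFF && bytes[1] == 0xD8);
}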

Here's my code:

//======================================================
- (BOOL)save3DFile: (NSString *)filePath {


  // load TEST IMAGE into UIIMAGE
  UIImage *image = [UIImage imageNamed:@"testImage.jpg"];

  // convert image to JPEG encoded NSDATA
  NSData *imageData = UIImageJPEGRepresentation(image,1.0);

  // Save length of imageData to global "imDataLen" to use later in "load3DFile"
  imDataLen = [imageData length];

  // TEST: this works fine for CGImageRef creation in "loadTexture"
  // traceView.image=[UIImage imageWithData:[imageData subdataWithRange:NSMakeRange(0, imDataLen)]];
  // [self loadTexture];

  // TEST: this also works fine for CGImageRef creation in "loadTexture"
  // traceView.image=[UIImage imageWithData:txImData];
  // [self loadTexture];

  fvoh.fileVersion  = FVO_VERSION;
  fvoh.obVertDatLen = obVertDatLen;
  fvoh.obFaceDatLen = obFaceDatLen;
  fvoh.obNormDatLen = obNormDatLen;
  fvoh.obTextDatLen = obTextDatLen;
  fvoh.obCompCount  = obCompCount;
  fvoh.obVertCount  = obVertCount;
  fvoh.obElemCount  = obElemCount;
  fvoh.obElemSize   = obElemSize;
  fvoh.obElemType   = obElemType;

  NSMutableData *obSvData;
  obSvData=[NSMutableData dataWithBytes:&fvoh length:(sizeof(fvoh))];
  [obSvData appendBytes:obElem   length:obFaceDatLen];
  [obSvData appendBytes:mvElem   length:obVertDatLen];
  [obSvData appendBytes:mvNorm   length:obNormDatLen];
  [obSvData appendBytes:obText   length:obTextDatLen];
  [obSvData appendBytes:&ds      length:(sizeof(ds))];

  // next, we append image data, and write all data to a file
  // seems to work fine, no errors, at this point
  [obSvData appendBytes: imageData length:[imageData length]];  

  BOOL success=[obSvData writeToFile: filePath atomically:YES];
  return success; 
}
//======================================================
- (void) load3DFile:(NSString *)filePath {

  NSData *fvoData;
  NSUInteger offSet,fiLen,fhLen,dsLen;
  [[FileList sharedFileList] setCurrFile:(NSString *)filePath];

  fvoData=[NSData dataWithContentsOfFile:filePath];
  fiLen=[fvoData length];
  fhLen=sizeof(fvoh);
  dsLen=sizeof(ds);

  memcpy(&fvoh,[fvoData bytes],fhLen);offSet=fhLen;

  //+++++++++++++++++++++++++++++++
  obVertDatLen = fvoh.obVertDatLen;
  obFaceDatLen = fvoh.obFaceDatLen;
  obNormDatLen = fvoh.obNormDatLen;
  obTextDatLen = fvoh.obTextDatLen;
  obCompCount  = fvoh.obCompCount;
  obVertCount  = fvoh.obVertCount;
  obElemCount  = fvoh.obElemCount;
  obElemSize   = fvoh.obElemSize;
  obElemType   = fvoh.obElemType;
  //+++++++++++++++++++++++++++++++

  memcpy(obElem, [fvoData bytes]+offSet,obFaceDatLen);offSet+=obFaceDatLen;
  memcpy(mvElem, [fvoData bytes]+offSet,obVertDatLen);offSet+=obVertDatLen;
  memcpy(mvNorm, [fvoData bytes]+offSet,obNormDatLen);offSet+=obNormDatLen;
  memcpy(obText, [fvoData bytes]+offSet,obTextDatLen);offSet+=obTextDatLen;
  memcpy(&ds,    [fvoData bytes]+offSet,dsLen);offSet+=dsLen;

  // the following seems to read the data into "imageData" just fine, no errors
  // NSData *imageData = [fvoData subdataWithRange:NSMakeRange(offSet, imDataLen)];
  // NSData *imageData = [fvoData subdataWithRange:NSMakeRange((fiLen-imDataLen), imDataLen)];
  // NSData *imageData = [NSData dataWithBytes:[fvoData bytes]+offSet length: imDataLen];
  NSData *imageData = [NSData dataWithBytes:[fvoData bytes]+(fiLen-imDataLen) length: imDataLen];

  // but the contents of imageData seem to end up in an unexpected format, causing the error:
  // "CGContextDrawImage: invalid context 0x0." during CGImageRef creation in "loadTexture"

  traceView.image=[UIImage imageWithData:imageData];
  [self loadTexture];
}
//======================================================
- (void)loadTexture {

  CGImageRef image = traceView.image.CGImage;
  CGContextRef texContext;
  GLubyte *bytes = nil;
  GLsizei width, height;

  if(image){
      width=(GLsizei)CGImageGetWidth(image);
      height=(GLsizei)CGImageGetHeight(image);
      bytes=(GLubyte*) calloc(width*height*4,sizeof(GLubyte));
      texContext=CGBitmapContextCreate(bytes,width,height,8,width*4,CGImageGetColorSpace(image),
      kCGImageAlphaPremultipliedLast);
      CGContextDrawImage(texContext,CGRectMake(0.0,0.0,(CGFloat)width,(CGFloat)height),image);
      CGContextRelease(texContext);
  }

  if(bytes){
      glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
      glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
      glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP_TO_EDGE);
      glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP_TO_EDGE);
      glTexImage2D(GL_TEXTURE_2D,0,GL_RGBA,width,height,0,GL_RGBA,GL_UNSIGNED_BYTE,bytes);
      free(bytes);
  }
}
//======================================================

1 Answer

I didn't receive any answers to this question, but I finally stumbled across the answer myself. In the save3DFile code, instead of adding the image data to NSMutableData *obSvData using 'appendBytes', as shown below:

[obSvData appendBytes: imageData length:[imageData length]];

I instead use 'appendData' as shown here:

[obSvData appendData: imageData];

where imageData holds the JPEG-encoded contents of a UIImage, produced earlier by:

NSData *imageData = UIImageJPEGRepresentation(image,1.0);

See the complete code listing above for context. The difference is that 'appendBytes:length:' expects a pointer to raw bytes, so passing the NSData object itself copies memory starting at the object's own address rather than the JPEG bytes it holds; 'appendData:' appends the actual contents of the NSData. Using 'appendData' instead of 'appendBytes' made all the difference: it let me store the image data in the same file as all the other 3D model data (vertices, indices, normals, et cetera), reload everything without a problem, and successfully create textured 3D models from a single file.
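
For completeness, here is how the save and load ends line up after the fix. This is a condensed sketch using the same variable names as the listing above, with error handling omitted:

// in save3DFile: append the JPEG bytes themselves, not the NSData object pointer
NSData *imageData = UIImageJPEGRepresentation(image, 1.0);
imDataLen = [imageData length];           // remember the length for load3DFile
[obSvData appendData:imageData];          // was: appendBytes:imageData length:[imageData length]

// in load3DFile: the JPEG stream occupies the last imDataLen bytes of the file
NSUInteger fiLen = [fvoData length];
NSData *imageData = [fvoData subdataWithRange:NSMakeRange(fiLen - imDataLen, imDataLen)];
traceView.image = [UIImage imageWithData:imageData];
[self loadTexture];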
