iOS Code Snippets

Created for Appitecture Visual Studies Session B
Graduate School of Architecture, Planning and Preservation
Columbia University

Creating Objects

// Use "*" when declaring objects (including your own). This designates a "pointer".

// Declare a pointer to a UILabel — no object exists yet.
UILabel *label;
// Allocate memory for the object, then initialize its variables.
label = [[UILabel alloc] init];

/* All objects are "allocated" and "initialized". 
Allocation sets up a block of memory for the object.
Initializing sets up variables. If you don't see a "*", 
you are probably seeing a c struct (structure).
Structures are different from classes in that they only 
store data (no behaviors). */

// CGRect is a C struct (an origin CGPoint and a size CGSize) — note: no "*".
CGRect rect = CGRectMake(0,0,300,200);
// Common structures in iOS are CGPoint, CGRect, CGSize.

Dot Syntax


// Setter message send and dot syntax are two spellings of the same call.
UILabel *label = [[UILabel alloc] init];
[label setText:@"Hello World"];
//is functionally equivalent to...
label.text = @"Hello World";
//Note that a string can be created by prefixing "@" to a string in quotes, for instance:
NSLog(@"Made Label");   // fixed: the logging function is NSLog (capital L), not "NSlog"

Adding to View Hierarchy

//In order for your UI to display, you must add it to the view hierarchy
[self.window addSubview:label];

Accessing Accelerometer

//Defining our object as a delegate in the ".h" file:
// The class declares conformance to both protocols in angle brackets.
@interface AppDelegate_iPhone : NSObject <UIApplicationDelegate, UIAccelerometerDelegate> {
    UIWindow *window;
}
@end

//Accessing the UIAccelerometer Singleton in the ".m" files
// sharedAccelerometer returns the app-wide instance; self must adopt UIAccelerometerDelegate.
[[UIAccelerometer sharedAccelerometer] setDelegate:self];
//To conform to the delegate protocol, we add a method in our ".m" file.
// Called by the accelerometer singleton with each new acceleration sample.
-(void)accelerometer:(UIAccelerometer *)accelerometer didAccelerate:(UIAcceleration *)acceleration{
	NSLog(@"%f, %f, %f", acceleration.x, acceleration.y, acceleration.z);
}

Adding a Button

    // Build a rounded-rect button, size and title it, then wire its
    // touch-up-inside event to the showCamera method on self.
    UIButton *cameraButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    cameraButton.frame = CGRectMake(10, 10, 200, 100);
    [cameraButton setTitle:@"Camera" forState:UIControlStateNormal];
    [cameraButton addTarget:self
                     action:@selector(showCamera)
           forControlEvents:UIControlEventTouchUpInside];

Showing Camera

// Presents the system camera UI, but only if the device actually has a camera
// (the simulator does not).
- (void)showCamera {
    if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
        UIImagePickerController *imagePickerController = [[UIImagePickerController alloc] init];
        imagePickerController.modalPresentationStyle = UIModalPresentationCurrentContext;
        imagePickerController.sourceType = UIImagePickerControllerSourceTypeCamera;
        imagePickerController.delegate = self;
        [self presentViewController:imagePickerController animated:YES completion:nil];
    }
}

Footnote: UIImagePickerController requires conformance to two delegate protocols, UINavigationControllerDelegate and UIImagePickerControllerDelegate.

Accessing Gyro Data

// Create the motion manager (mm is assumed to be an ivar/property).
mm = [[CMMotionManager alloc] init];

// Check the gyro is available on this device
if ([mm isGyroAvailable]) {
	NSLog(@"gyro is available");   // fixed typos ("gryo is avialable")

	if ([mm isGyroActive] == NO) {
		// Turn it on with an update interval of 1/30 second
		//NSLog(@"activating gyro");
		[mm setGyroUpdateInterval:1.f/30.f];
	}
	// Deliver samples on the main queue (required: the handler touches UI)
	// and map the three rotation rates onto the window's background color.
	[mm startGyroUpdatesToQueue:[NSOperationQueue mainQueue] withHandler:
	 ^(CMGyroData *gyroData, NSError *error) {
		 double x = gyroData.rotationRate.x * 0.1;
		 double y = gyroData.rotationRate.y * 0.1;
		 double z = gyroData.rotationRate.z * 0.1;
		 self.window.backgroundColor = [UIColor colorWithRed:x green:y blue:z alpha:1.0];
		 //NSLog(@"%f, %f, %f", x, y, z);   // %f (not %@) — these are doubles
	 }];
} else {
	NSLog(@"gyro is not available");
}

Playing a Sound

// Load "sound.wav" from the app bundle and prepare an AVAudioPlayer for it.
NSString *soundFilePath = [[NSBundle mainBundle] pathForResource: @"sound" ofType: @"wav"];
NSURL *fileURL = [[NSURL alloc] initFileURLWithPath: soundFilePath];
NSError *audioError = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithContentsOfURL: fileURL error: &audioError];
if (player == nil) {
    // init returns nil on failure (missing or unreadable file) — check the
    // return value rather than silently passing error:nil.
    NSLog(@"Could not create audio player: %@", audioError);
} else {
    [player prepareToPlay];
    [player setDelegate:self];
}

Recording a Sound

// Record into <tmp>/sound.caf. recordSettings is assumed to be defined elsewhere.
NSString *tempDir = NSTemporaryDirectory ();
// stringByAppendingPathComponent: inserts the "/" separator when needed;
// stringByAppendingString: silently relies on tempDir ending with a slash.
NSString *soundFilePath = [tempDir stringByAppendingPathComponent: @"sound.caf"];
NSURL *soundFileURL = [[NSURL alloc] initFileURLWithPath: soundFilePath];
NSError *recordError = nil;
AVAudioRecorder *recorder = [[AVAudioRecorder alloc] initWithURL:soundFileURL
                                        settings: recordSettings
                                           error: &recordError];
if (recorder == nil) {
    // init returns nil on failure — check the return, not just the error.
    NSLog(@"Could not create audio recorder: %@", recordError);
} else {
    recorder.delegate = self;
    [recorder prepareToRecord];
    [recorder record];
}

Touch Delegation

// UIResponder touch-handling methods; override these in a UIView or
// UIViewController subclass to receive raw touch events.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event;
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event;
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event;
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event;   // fixed: declaration was missing its semicolon

Animate View Properties

// Open an animation block; view property changes made inside it are animated.
[UIView beginAnimations:@"id" context:nil];
[UIView setAnimationDelay:2.0];        // wait 2 seconds before starting
[UIView setAnimationDuration:1.0];     // animate over 1 second
[UIView setAnimationCurve:UIViewAnimationCurveEaseInOut];
[view setAlpha:0.4];
[view setCenter:CGPointMake(200, 400)];
// Commit the block — the fade and the move run together.
[UIView commitAnimations];

Getting the GPS Location

// Getting our Location
// See delegate method "locationManager:(CLLocationManager *)manager didUpdateToLocation:" for response
CLLocationManager *locationManager = [[CLLocationManager alloc] init];
locationManager.delegate = self; // set ourselves as the delegate
locationManager.desiredAccuracy = kCLLocationAccuracyBest; // best available fix (uses more power)
// Begin delivering location events to the delegate.
[locationManager startUpdatingLocation];

Responding to a Location Event

// CLLocationManager delegate callback: fired each time a new location fix arrives.
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation {
    // CLLocationDegrees is a double; a float would throw away coordinate precision.
    CLLocationDegrees lat = newLocation.coordinate.latitude;
    CLLocationDegrees lon = newLocation.coordinate.longitude;
    NSLog(@"latitude %.5f, longitude %.5f\n",lat,lon);
}

Adding a Map

// Creating a Map with a custom center and span
MKMapView *map = [[MKMapView alloc] initWithFrame:self.view.frame];
CLLocationCoordinate2D center = CLLocationCoordinate2DMake(40.80827, -73.96098);
MKCoordinateSpan zoom = MKCoordinateSpanMake(.01, .01);
// Build the region first, then hand it to the map in one step.
MKCoordinateRegion region = MKCoordinateRegionMake(center, zoom);
[map setRegion:region];
[self.view addSubview:map];

Adding a Pin Annotation

// Drop a titled point annotation at a fixed coordinate on the map.
MKPointAnnotation *annotation = [[MKPointAnnotation alloc] init];
annotation.coordinate = CLLocationCoordinate2DMake(40.80827, -73.96098);
annotation.title = @"User Touch";
[map addAnnotation:annotation];

Adding a Gesture Recognizer to a View

// Double-tap (single finger) recognizer that calls handleGesture: on self.
UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(handleGesture:)];
tap.numberOfTapsRequired = 2;
tap.numberOfTouchesRequired = 1;
[map addGestureRecognizer:tap];
[tap release];   // the view retains the recognizer (manual retain/release code)

// This assumes you have implemented:
- (void)handleGesture:(UIGestureRecognizer *)gestureRecognizer{
    // respond to the tap here
}

Using an Image as an Annotation

// MKMapView delegate: supply an annotation view that draws a custom image
// instead of the default pin, recycling views when possible.
-(MKAnnotationView*)mapView:(MKMapView *)mapView viewForAnnotation:(id)annotation{
    MKAnnotationView* aView = (MKAnnotationView*)[mapView dequeueReusableAnnotationViewWithIdentifier:@"CustomAnnotation"];
    if (aView == nil) {
        // No recycled view available — create one (autoreleased; this is MRC code).
        aView = [[[MKAnnotationView alloc] initWithAnnotation:annotation reuseIdentifier:@"CustomAnnotation"] autorelease];
    }
    aView.image = [UIImage imageNamed:@"GradientDot_128.png"];
    return aView;
}

Creating Colors

// Three ways to build a UIColor: RGB components, grayscale, and HSB.
UIColor *blueColor = [UIColor colorWithRed:0.0 green:0.0 blue:1.0 alpha:1.0];
UIColor *whiteColor = [UIColor colorWithWhite:1.0 alpha:1.0];
// random()%100 gives 0..99, so the hue is a value in [0.0, 0.99].
UIColor *randomColor = [UIColor colorWithHue:(random()%100)/100.0 saturation:1.0 brightness:1.0 alpha:1.0];

Simple DrawRect:

 (void)drawRect:(CGRect)rect {
	// Create a CGContext and clear it
	CGContextRef context = UIGraphicsGetCurrentContext();
	CGContextClearRect(context, [[UIScreen mainScreen] bounds]);

	// draw a rectangle
	[[UIColor lightGrayColor] setFill];
	CGContextFillRect(context, CGRectMake(20, 40, 280, 420));

Draw a matrix of rectangles

// Draw a matrix of rectangles (half not drawn)
[[UIColor blackColor] setStroke];
[[UIColor whiteColor] setFill];
for(int i=0; i<rect.size.width; i+=20){
  for(int j=0; j<rect.size.height; j+=20){
    // random()%100 is 0..99; roughly half the cells pass this test
    if( random()%100 > 50){
      CGContextStrokeRect(context, CGRectMake(i,j,20,20));
      CGContextFillRect(context, CGRectMake(i,j,20,20));
    }
  }
}
Draw a Shape

// Make a Path: a 6-segment (hexagonal) ring of points around `origin`.
CGPoint origin = CGPointMake(160, 240);   // center point (value was blank in the handout — adjust as needed)
float radius = 100;
float segs = 6.0;
for (int i=0; i<segs; i++) {
	// 6.28 ≈ 2π, so i/segs sweeps one full revolution
	float x = sinf(i/segs * 6.28) * radius + origin.x;
	float y = cosf(i/segs * 6.28) * radius + origin.y;
	if (i == 0) {
		// first point starts the subpath
		CGContextMoveToPoint(context, x, y);
	} else {
		CGContextAddLineToPoint(context, x, y);
	}
}
[[UIColor greenColor] setFill];
[[UIColor whiteColor] setStroke];
CGContextDrawPath(context, kCGPathFillStroke);

Draw an Image

// Load "Icon.png" from the bundle and draw it scaled into a 100x100 rect.
// Must run inside drawRect: (requires an active graphics context).
UIImage *img = [UIImage imageNamed:@"Icon.png"];
[img drawInRect:CGRectMake(100, 100, 100, 100)];

Draw Text in a View

// Put a string to the window
// A @"..." literal is already an NSString — stringWithString: was redundant.
NSString *str = @"Hello World";
UIFont *font = [UIFont systemFontOfSize:32.0];
[[UIColor blackColor] setFill];
[str drawAtPoint:CGPointMake(40.0, 60.0) withFont:font];

Generating Random Numbers

// arc4random_uniform(100) returns 0..99 without the modulo bias of
// arc4random() % 100, and needs no seeding.
int r = arc4random_uniform(100);
float f = arc4random_uniform(100) / 100.0f;   // 0.00 .. 0.99
NSLog(@"Random int: %i  Random float: %.4f", r, f);

Parsing the Contents of a Text File

// Load Schools.txt from the bundle, split into lines, then log the first
// comma-separated field of each line as a float.
NSString *file = [[NSBundle mainBundle] pathForResource:@"Schools" ofType:@"txt" ];
NSLog(@"File is at %@",file);
NSString *data = [NSString stringWithContentsOfFile:file usedEncoding:nil error:nil];
// %lu with a cast matches NSUInteger; %i triggers a format warning on 64-bit.
NSLog(@"Data is %lu characters", (unsigned long)[data length]);
NSArray *arr = [data componentsSeparatedByString:@"\n"];
NSLog(@"Array has %lu elements", (unsigned long)[arr count]);
for (NSString *line in arr){
    NSArray *components = [line componentsSeparatedByString:@","];
    NSLog(@"%f",[[components objectAtIndex:0] floatValue]);
}

Useful NSXMLParser Delegate Methods

// Called when the parser reads an opening tag; remember its name so
// foundCharacters: can label the text that follows.
-(void)parser:(NSXMLParser *)parser didStartElement:(NSString *)elementName namespaceURI:(NSString *)namespaceURI qualifiedName:(NSString *)qName attributes:(NSDictionary *)attributeDict{
    cachedElementName = [NSString stringWithFormat:@"%@", elementName];
}

// Called with the character data inside the most recently opened element
// (may fire multiple times per element).
-(void)parser:(NSXMLParser *)parser foundCharacters:(NSString *)string{
    NSLog(@"%@: %@",cachedElementName,string);
}

// Called once when the parser reaches the end of the document.
-(void)parserDidEndDocument:(NSXMLParser *)parser{
    NSLog(@"Done with %@", [parser description]);
}

Starting an XML Parser

// Point this at the XML feed to parse (left blank in the handout).
NSString *url_string = @"";
NSURL *url = [NSURL URLWithString:url_string];
// initWithContentsOfURL: downloads the data; -parse runs synchronously,
// firing the delegate callbacks above as it reads the document.
NSXMLParser *parser = [[NSXMLParser alloc] initWithContentsOfURL:url];
parser.delegate = self;
[parser parse];


@proxyToru Toru Hasegawa is co-director of Proxy. Toru investigates the culture of innovation and technology in architecture as an adjunct assistant professor at the Columbia University Graduate School of Architecture. Toru is a co-director of the Cloud Lab.

@ProxyMark Mark Collins is co-director of Proxy, an innovation-focused design firm working across a range of scales and platforms. Mark is an adjunct assistant professor at the Columbia University Graduate School of Architecture, where he co-directs the Cloud Lab.