Project import
diff --git a/Libraries/DecoderWrapper/DecodedFrame.h b/Libraries/DecoderWrapper/DecodedFrame.h
new file mode 100644
index 0000000..3f5e197
--- /dev/null
+++ b/Libraries/DecoderWrapper/DecodedFrame.h
@@ -0,0 +1,39 @@
+//
+//  DecodedFrame.h
+//  DecoderWrapper
+//
+//  Created by Mike Montalbo on 6/4/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import <CoreVideo/CoreVideo.h>
+
+@interface DecodedFrame : NSObject
++ (DecodedFrame *)decodedFrameWithWidth:(NSUInteger)width 
+                                 height:(NSUInteger)height
+                            imageBuffer:(CVImageBufferRef)imageBuffer
+                              timestamp:(int64_t)pts;
+
+- (id)initWithWidth:(NSUInteger)width 
+             height:(NSUInteger)height
+             yPlane:(NSData *)yPlane
+             uPlane:(NSData *)uPlane
+             vPlane:(NSData *)vPlane
+          timestamp:(int64_t)pts;
+
+- (id)initWithWidth:(NSUInteger)width
+             height:(NSUInteger)height
+        imageBuffer:(CVImageBufferRef)imageBuffer
+          timestamp:(int64_t)pts;
+
+- (NSData *)sceneChangeData;
++ (BOOL)isSceneChangeFromData:(NSData *)data1 toData:(NSData *)data2;
+
+@property (nonatomic, strong) NSData *yPlane;
+@property (nonatomic, strong) NSData *uPlane;
+@property (nonatomic, strong) NSData *vPlane;
+@property (nonatomic, assign) int64_t PTS;
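+// Declared assign, but ownership is handled manually: the CVImageBufferRef
+// initializer retains the buffer with CVBufferRetain and -dealloc releases it
+// (see DecodedFrame.m).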
+@property (nonatomic, assign) CVImageBufferRef imageBuffer;
+@property (nonatomic, assign) NSUInteger width;
+@property (nonatomic, assign) NSUInteger height;
+@end
diff --git a/Libraries/DecoderWrapper/DecodedFrame.m b/Libraries/DecoderWrapper/DecodedFrame.m
new file mode 100644
index 0000000..8b8e7cc
--- /dev/null
+++ b/Libraries/DecoderWrapper/DecodedFrame.m
@@ -0,0 +1,124 @@
+//
+//  DecodedFrame.m
+//  DecoderWrapper
+//
+//  Created by Mike Montalbo on 6/4/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "DecodedFrame.h"
+#import "NLCommonLoggingNVP.h"
+
+static const CGFloat kDCSceneChangeThreshold = 100.0;
+static const NSInteger kDCSubsampleRatio = 500;
+
+@implementation DecodedFrame
+
++ (DecodedFrame *)decodedFrameWithWidth:(NSUInteger)width
+                                 height:(NSUInteger)height
+                            imageBuffer:(CVImageBufferRef)imageBuffer
+                              timestamp:(int64_t)pts {
+  return [[DecodedFrame alloc] initWithWidth:width
+                                      height:height
+                                 imageBuffer:imageBuffer
+                                   timestamp:pts];
+}
+
+- (id)initWithWidth:(NSUInteger)width
+             height:(NSUInteger)height
+             yPlane:(NSData *)yPlane
+             uPlane:(NSData *)uPlane
+             vPlane:(NSData *)vPlane
+          timestamp:(int64_t)pts {
+  if((self = [super init])) {
+    _width = width;
+    _height = height;
+    
+    _yPlane = yPlane;
+    _uPlane = uPlane;
+    _vPlane = vPlane;
+    _PTS = pts;
+  }
+  
+  return self;
+}
+
+- (id)initWithWidth:(NSUInteger)width 
+             height:(NSUInteger)height
+        imageBuffer:(CVImageBufferRef)imageBuffer
+          timestamp:(int64_t)pts {
+  if((self = [super init])) {
+    _width = width;
+    _height = height;
+    
+    _imageBuffer = imageBuffer;
+    CVBufferRetain(_imageBuffer);
+    
+    _PTS = pts;
+  }
+  
+  return self;
+}
+
+- (NSData *)sceneChangeData {
+
+  // only the Y'UV planes version of DecodedFrame is used, it seems.
+  // we'll just use the luma (Y') plane.
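+  //
+  // The "scene change data" is a coarse signature: every kDCSubsampleRatio-th
+  // luma byte is copied into a small buffer. Two signatures are later compared
+  // in +isSceneChangeFromData:toData: by mean squared difference against
+  // kDCSceneChangeThreshold.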
+  
+  unsigned char *lumaBytes;
+  size_t lumaLength;
+  
+  if (self.imageBuffer) {
+    CVPixelBufferLockBaseAddress(self.imageBuffer, 0);
+    lumaBytes = (unsigned char *)CVPixelBufferGetBaseAddress(self.imageBuffer);
+    lumaLength = CVPixelBufferGetDataSize(self.imageBuffer);
+  }
+  else {
+    lumaBytes = (unsigned char *)self.yPlane.bytes;
+    lumaLength = self.yPlane.length;
+  }
+
+  NSMutableData *data = [NSMutableData dataWithLength:lumaLength / kDCSubsampleRatio];
+  unsigned char *dataBytes = (unsigned char *)[data mutableBytes];
+
+  for(int i = 0; i < data.length; i++) {
+    dataBytes[i] = lumaBytes[i * kDCSubsampleRatio];
+  }
+
+  if (self.imageBuffer) {
+    CVPixelBufferUnlockBaseAddress(self.imageBuffer, 0);
+  }
+
+  return [NSData dataWithData:data];
+}
+
++ (BOOL)isSceneChangeFromData:(NSData *)data1 toData:(NSData *)data2 {  
+  // Length check also guards the division below against empty data.
+  if(data1.length == 0 || data1.length != data2.length)
+    return NO;
+
+  unsigned char *bytes1 = (unsigned char *)[data1 bytes];
+  unsigned char *bytes2 = (unsigned char *)[data2 bytes];
+
+  float diffSum = 0;
+  
+  for(int i = 0; i < data1.length; i++) {
+    int diff = bytes1[i]-bytes2[i];
+    diffSum += diff * diff;
+  }
+    
+  BOOL isSceneChange = ((diffSum / data1.length) > kDCSceneChangeThreshold);
+    
+  if(isSceneChange)
+    NLLogNVPInfo(@"scenechange detected, diff = %f", (diffSum / data1.length));
+  
+  return isSceneChange;
+}
+
+- (void)dealloc {
+  CVBufferRelease(_imageBuffer);
+}
+
+@end
diff --git a/Libraries/DecoderWrapper/VideoDecoder.h b/Libraries/DecoderWrapper/VideoDecoder.h
new file mode 100644
index 0000000..90d1ff7
--- /dev/null
+++ b/Libraries/DecoderWrapper/VideoDecoder.h
@@ -0,0 +1,45 @@
+//
+//  VideoDecoder.h
+//  DecoderWrapper
+//
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import <CoreVideo/CoreVideo.h>
+#import <UIKit/UIKit.h>
+
+#import "DecodedFrame.h"
+
+enum VideoCodecType {
+  kVCT_H264
+};
+
+enum VideoColorSpace {
+  VideoColorSpaceRGB,
+  VideoColorSpaceYUV
+};
+
+typedef void (*LogCallbackfn)(int level, const char *module, const char *logLine);
+
+@interface VideoDecoder : NSObject
+
++ (void)staticInitialize;
++ (void)registerLogCallback:(LogCallbackfn)fn;
+
+- (id)initWithCodec:(enum VideoCodecType)codecType 
+         colorSpace:(enum VideoColorSpace)colorSpace 
+              width:(int)width 
+             height:(int)height 
+        privateData:(NSData*)privateData
+        allowChunks:(BOOL)allowChunks;
+
+- (void)decodeFrame:(NSData*)frameData hasStartCode:(BOOL)hasStartCode PTS:(int64_t)PTS;
+
+- (BOOL)isFrameReady;
+- (UIImage *)getDecodedRGBFrame;
+- (DecodedFrame *)getDecodedFrame;
+- (NSUInteger)getDecodedFrameWidth;
+- (NSUInteger)getDecodedFrameHeight;
+
+@end
diff --git a/Libraries/DecoderWrapper/VideoDecoder.m b/Libraries/DecoderWrapper/VideoDecoder.m
new file mode 100644
index 0000000..cc4ba55
--- /dev/null
+++ b/Libraries/DecoderWrapper/VideoDecoder.m
@@ -0,0 +1,300 @@
+//
+//  VideoDecoder.m
+//  DecoderWrapper
+//
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import "VideoDecoder.h"
+
+#import <avformat.h>
+#import <avcodec.h>
+#import <swscale.h>
+#import "NLCommonLoggingNVP.h"
+
+//#define SHOW_DEBUG_MV
+
+#define OUTPUT_COLOR_FORMAT PIX_FMT_RGB555BE
+
+LogCallbackfn g_logCallbackFn = NULL;
+
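+// Annex B 4-byte start code, prepended to NAL units that arrive without one.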
+const uint8_t startCode[] = { 0x00, 0x00, 0x00, 0x01 };
+#define INIT_INPUT_BUFFER_SIZE 2048
+
+static int get_h264_nalu_type(NSData *packet)
+{
+  if ([packet length] < 5)
+    return -1;
+  
+  uint8_t header[5] = {0};
+  [packet getBytes:header length:sizeof(header)];
+  
+  // Assuming all packets begin with 4-byte marker
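+  // An H.264 NAL unit header is one byte: forbidden_zero_bit (1 bit),
+  // nal_ref_idc (2 bits), nal_unit_type (5 bits) - hence the 0x1F mask.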
+  uint8_t nal_unit_type = (header[4] & 0x1F);
+  return nal_unit_type;
+}
+
+static void av_log_callback(void *ptr, 
+                            int level, 
+                            const char *fmt, 
+                            va_list vl)
+{
+  static char line[1024] = {0};
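+  // Note: the static buffer makes this callback non-reentrant; it assumes
+  // FFmpeg serializes log calls (or that interleaved log lines are acceptable).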
+  const char *module = "unknown";
+  
+  if (ptr)
+  {
+    AVClass *avc = *(AVClass**) ptr;
+    module = avc->item_name(ptr);
+  }
+  
+  vsnprintf(line, sizeof(line), fmt, vl);
+  
+  if (g_logCallbackFn) {
+    g_logCallbackFn(level, module, line);
+  }
+}
+
+@implementation VideoDecoder {
+  struct AVCodec *codec;
+  struct SwsContext *convertCtx;
+  uint8_t *outputBuf;
+  int outputBufLen;
+  
+  BOOL outputInit;
+  BOOL frameReady;
+  
+  uint8_t *inputBuf;
+  int inputBufLen;
+  
+  AVFrame *_srcFrame;
+  AVFrame *_dstFrame;
+  AVCodecContext *_codecCtx;
+  enum AVPixelFormat _outputColorSpace;
+}
+
++ (void)staticInitialize {
+  av_register_all();  
+}
+
++ (void)registerLogCallback:(LogCallbackfn)fn {
+  g_logCallbackFn = fn;
+  av_log_set_level(AV_LOG_DEBUG);  
+  av_log_set_callback(av_log_callback);  
+}
+
+- (id)initWithCodec:(enum VideoCodecType)codecType 
+         colorSpace:(enum VideoColorSpace)colorSpace 
+              width:(int)width 
+             height:(int)height 
+        privateData:(NSData*)privateData
+        allowChunks:(BOOL)allowChunks {
+  if ((self = [super init])) {
+    
+    inputBufLen = INIT_INPUT_BUFFER_SIZE;
+    inputBuf = malloc(inputBufLen);
+    
+    if (colorSpace == VideoColorSpaceRGB) {
+      _outputColorSpace = PIX_FMT_RGB555BE;
+    } else {
+      _outputColorSpace = PIX_FMT_YUV420P;
+    }
+    
+    codec = avcodec_find_decoder(CODEC_ID_H264);
+    _codecCtx = avcodec_alloc_context3(codec);
+    
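+    // thread_count of 0 lets libavcodec pick a thread count automatically;
+    // FF_THREAD_FRAME requests frame-level (rather than slice-level) threading.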
+    _codecCtx->thread_count = 0;
+    _codecCtx->thread_type = FF_THREAD_FRAME;
+    
+    // Note: for H.264 RTSP streams, the width and height are usually not specified (width and height are 0).  
+    // These fields will become filled in once the first frame is decoded and the SPS is processed.
+    _codecCtx->width = width;
+    _codecCtx->height = height;
+    
+    if (privateData != nil) {
+      // FFmpeg requires extradata to be padded by FF_INPUT_BUFFER_PADDING_SIZE zeroed bytes.
+      _codecCtx->extradata = av_mallocz([privateData length] + FF_INPUT_BUFFER_PADDING_SIZE);
+      _codecCtx->extradata_size = (int)[privateData length];
+      [privateData getBytes:_codecCtx->extradata length:_codecCtx->extradata_size];
+    }
+    
+    _codecCtx->pix_fmt = PIX_FMT_YUV420P;
+#ifdef SHOW_DEBUG_MV
+    _codecCtx->debug_mv = 0xFF;
+#endif
+    
+    _codecCtx->flags |= CODEC_FLAG_EMU_EDGE;
+    _codecCtx->flags2 |= CODEC_FLAG2_FAST;
+    
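+    // CODEC_FLAG2_CHUNKS tells the decoder that the bitstream may be split at
+    // arbitrary packet boundaries, i.e. a frame can span multiple decode calls.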
+    if (allowChunks) {
+      _codecCtx->flags2 |= CODEC_FLAG2_CHUNKS;
+    }
+    
+    _srcFrame = av_frame_alloc();
+    _dstFrame = av_frame_alloc();
+
+    // Lock required around calls to avcodec_open/close
+    @synchronized(self.class) {
+      int res = avcodec_open2(_codecCtx, codec, NULL);
+
+      if (res < 0) {
+        NLLogNVPWarn(@"Failed to initialize decoder");
+      }
+    }
+  }
+  
+  return self;  
+}
+
+- (void)decodeFrame:(NSData*)frameData hasStartCode:(BOOL)hasStartCode PTS:(int64_t)PTS {
+  AVPacket packet = {0};
+  
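+  // A nil frameData results in an empty packet, which asks the decoder to
+  // flush any internally buffered (delayed) frames.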
+  if (frameData) {
+    int neededSize = (int)[frameData length] + 4 + (FF_INPUT_BUFFER_PADDING_SIZE);
+    if (inputBufLen < neededSize) {
+      inputBufLen = neededSize * 2;
+      inputBuf = realloc(inputBuf, inputBufLen);
+    }
+    
+    packet.data = inputBuf;
+
+    if (hasStartCode) {
+      memcpy(inputBuf, [frameData bytes], [frameData length]);
+      packet.size = (int)[frameData length];
+    } else {
+      memcpy(inputBuf, startCode, 4);
+      memcpy(inputBuf + 4, [frameData bytes], [frameData length]);
+      packet.size = (int)[frameData length] + 4;
+    }
+  }
+  
+  packet.pts = PTS;
+  
+  int frameFinished = 0;
+  int res = avcodec_decode_video2(_codecCtx, _srcFrame, &frameFinished, &packet);
+  if (res < 0) {
+#ifdef DEBUG
+    NLLogNVPInfo(@"avcodec_decode_video2 res: %d", res);
+#endif
+    return;
+  }
+  
+  if (frameFinished)
+    frameReady = YES;
+}
+
+- (void)initializeOutput {
+  // Need to delay initializing the output buffers because we don't know the dimensions until we decode the first frame.
+  if (!outputInit && _codecCtx->width > 0 && _codecCtx->height > 0) {
+    outputBufLen = avpicture_get_size(_outputColorSpace, _codecCtx->width, _codecCtx->height);
+    outputBuf = av_malloc(outputBufLen);
+    
+    avpicture_fill((AVPicture*)_dstFrame, outputBuf, _outputColorSpace, _codecCtx->width, _codecCtx->height);
+    
+    convertCtx = sws_getContext(_codecCtx->width, _codecCtx->height, _codecCtx->pix_fmt,  _codecCtx->width,
+                                _codecCtx->height, _outputColorSpace, SWS_FAST_BILINEAR, NULL, NULL, NULL);
+    
+    outputInit = YES;
+  }
+}
+
+- (BOOL)isFrameReady {
+  return frameReady;
+}
+
+- (UIImage *)getDecodedRGBFrame {
+  if (!frameReady)
+    return nil;
+  
+  [self initializeOutput];
+    
+  sws_scale(convertCtx, (const uint8_t**)_srcFrame->data, _srcFrame->linesize, 0, _codecCtx->height, _dstFrame->data, _dstFrame->linesize);
+  
+  frameReady = NO;
+
+  NSData *imgData = [NSData dataWithBytesNoCopy:outputBuf length:outputBufLen freeWhenDone:NO];
+  
+  NSUInteger width = [self getDecodedFrameWidth];
+  NSUInteger height = [self getDecodedFrameHeight];
+  
+  if (width && height && imgData) {
+    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+    //The constants for specifying the alpha channel information are declared with the CGImageAlphaInfo type but can be passed to this parameter safely.
+    CGContextRef context = CGBitmapContextCreate((void*)[imgData bytes], width, height, 5, width*2, colorSpace, (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
+    
+    CGImageRef imageRef = CGBitmapContextCreateImage(context);
+    UIImage *result = [UIImage imageWithCGImage:imageRef];
+    
+    CGImageRelease(imageRef);
+    CGContextRelease(context);
+    CGColorSpaceRelease(colorSpace);
+    
+    return result;
+  }
+  
+  return nil;
+}
+
+- (DecodedFrame *)getPixelBufferFromFrame:(struct AVFrame *)frame {
+  NSUInteger width = [self getDecodedFrameWidth];
+  NSUInteger height = [self getDecodedFrameHeight];
+  
+  NSData *yPlane = [NSData dataWithBytesNoCopy:frame->data[0] length:frame->linesize[0]*height freeWhenDone:NO];
+  NSData *uPlane = [NSData dataWithBytesNoCopy:frame->data[1] length:frame->linesize[1]*height/2 freeWhenDone:NO];
+  NSData *vPlane = [NSData dataWithBytesNoCopy:frame->data[2] length:frame->linesize[2]*height/2 freeWhenDone:NO];
+  
+  DecodedFrame *decodedFrame = [[DecodedFrame alloc] initWithWidth:width height:height yPlane:yPlane uPlane:uPlane vPlane:vPlane timestamp:0];
+  
+  return decodedFrame;
+}
+
+- (DecodedFrame *)getDecodedFrame {
+  if (!frameReady)
+    return nil;
+  
+  frameReady = NO;
+  
+  DecodedFrame *frame = nil;
+  
+  // Run it through sws_scale if the image is not packed
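+  // Decoders commonly pad each row for alignment (linesize > width); consumers
+  // of DecodedFrame presumably expect tightly packed planes, so repack first.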
+  if (_srcFrame->linesize[0] != _srcFrame->width) {
+    [self initializeOutput];
+    sws_scale(convertCtx, (const uint8_t**)_srcFrame->data, _srcFrame->linesize, 0, _codecCtx->height, _dstFrame->data, _dstFrame->linesize);
+    frame = [self getPixelBufferFromFrame:_dstFrame];
+  } else {
+    // Otherwise we can just return the source frame
+    frame = [self getPixelBufferFromFrame:_srcFrame];
+  }
+
+  if (_srcFrame->pkt_pts != AV_NOPTS_VALUE && _srcFrame->pkt_pts != 0) {
+    frame.PTS = _srcFrame->pkt_pts;
+  }
+  
+  return frame;
+}
+
+- (NSUInteger)getDecodedFrameWidth {
+  return _codecCtx->width;
+}
+
+- (NSUInteger)getDecodedFrameHeight {
+  return _codecCtx->height;
+}
+
+- (void)dealloc {
+  free(inputBuf);
+  
+  // Lock required around calls to avcodec_open/close
+  @synchronized(self.class) {
+    av_free(_codecCtx->extradata);
+    avcodec_close(_codecCtx);
+    av_free(_codecCtx);
+    av_frame_free(&_srcFrame);
+    av_frame_free(&_dstFrame);
+    av_free(outputBuf);
+    sws_freeContext(convertCtx);
+  }
+}
+
+@end
diff --git a/Libraries/DropcamUtils/DebugLog.h b/Libraries/DropcamUtils/DebugLog.h
new file mode 100644
index 0000000..5961648
--- /dev/null
+++ b/Libraries/DropcamUtils/DebugLog.h
@@ -0,0 +1,14 @@
+/*
+ *  DebugLog.h
+ *  Dropcam
+ *
+ *  Created by Loren Kirkby on 1/8/10.
+ *  Copyright 2010 Dropcam. All rights reserved.
+ *
+ */
+
+#ifdef DEBUG
+#define Debug_NSLog(format, ...) NSLog(format, ## __VA_ARGS__)
+#else
+#define Debug_NSLog(format, ...) (void)0
+#endif
\ No newline at end of file
diff --git a/Libraries/DropcamUtils/Device.h b/Libraries/DropcamUtils/Device.h
new file mode 100644
index 0000000..f081476
--- /dev/null
+++ b/Libraries/DropcamUtils/Device.h
@@ -0,0 +1,31 @@
+//
+//  Device.h
+//  Dropcam
+//
+//  Created by Loren Kirkby on 12/29/09.
+//  Copyright 2009 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+extern NSString *kDeviceNameiPhone;
+extern NSString *kDeviceNameiPod;
+extern NSString *kDeviceNameiPad;
+
+enum kDeviceClass {
+  DEVICE_CLASS_1ST_GEN,   // iPod Touch 1st-gen, iPhone, iPhone 3G
+  DEVICE_CLASS_2ND_GEN,   // 2nd-gen iPod Touch
+  DEVICE_CLASS_3RD_GEN,   // 3rd-gen iPod Touch, iPhone 3GS
+  DEVICE_CLASS_4TH_GEN,   // 1st-gen iPad, iPhone 4, 4th-gen iPod Touch
+  DEVICE_CLASS_5TH_GEN,   // iPad 2 and 3
+  DEVICE_CLASS_SIMULATOR, // iPhone Simulator
+  DEVICE_CLASS_NEXT_GEN   // Unknown - assumed to be faster than 1st, 2nd or 3rd-gen
+};
+
+@interface Device : NSObject {
+}
+
++(enum kDeviceClass)getDeviceClass; 
++(NSString *)getMachineName; 
++(NSString *)getDeviceName; 
+@end
diff --git a/Libraries/DropcamUtils/Device.m b/Libraries/DropcamUtils/Device.m
new file mode 100644
index 0000000..64c0315
--- /dev/null
+++ b/Libraries/DropcamUtils/Device.m
@@ -0,0 +1,104 @@
+//
+//  Device.m
+//  Dropcam
+//
+//  Created by Loren Kirkby on 12/29/09.
+//  Copyright 2009 Dropcam. All rights reserved.
+//
+//  Found here: https://devforums.apple.com/message/146094#146094
+
+#import "Device.h"
+#import "DebugLog.h"
+#import "NLCommonLoggingNVP.h"
+#include <sys/types.h>
+#include <sys/sysctl.h>
+
+NSString *kDeviceNameiPhone = @"iPhone";
+NSString *kDeviceNameiPod = @"iPod";
+NSString *kDeviceNameiPad = @"iPad";
+
+@implementation Device
+
++(enum kDeviceClass)getDeviceClass {
+  NSString *machineName = [Device getMachineName];
+  
+  if (machineName) {
+    NLLogNVPInfo(@"Machine name: %@", machineName);
+    
+    if ([machineName isEqualToString:@"iPhone1,1"])   // 1st-gen iPhone
+      return DEVICE_CLASS_1ST_GEN;
+    if ([machineName isEqualToString:@"iPhone1,2"])   // iPhone 3G
+      return DEVICE_CLASS_1ST_GEN;
+    if ([machineName isEqualToString:@"iPod1,1"])     // 1st-gen iPod Touch
+      return DEVICE_CLASS_1ST_GEN;
+    if ([machineName isEqualToString:@"iPod1,2"])     // 2nd-gen iPod Touch
+      return DEVICE_CLASS_2ND_GEN;
+    if ([machineName isEqualToString:@"iPod3,1"])     // 3rd-gen iPod Touch
+      return DEVICE_CLASS_3RD_GEN;
+    if ([machineName isEqualToString:@"iPod4,1"])     // 4th-gen iPod Touch
+      return DEVICE_CLASS_4TH_GEN;
+    if ([machineName isEqualToString:@"iPhone2,1"])   // iPhone 3GS
+      return DEVICE_CLASS_3RD_GEN;
+    if ([machineName isEqualToString:@"iPad1,1"])     // 1st-gen iPad (A4)
+      return DEVICE_CLASS_4TH_GEN;
+    if ([machineName isEqualToString:@"iPhone3,1"])   // iPhone 4 (A4)
+      return DEVICE_CLASS_4TH_GEN;
+    if ([machineName isEqualToString:@"iPad2,1"])     // iPad 2 Wi-Fi
+      return DEVICE_CLASS_5TH_GEN;
+    if ([machineName isEqualToString:@"iPad2,2"])     // iPad 2 GSM
+      return DEVICE_CLASS_5TH_GEN;
+    if ([machineName isEqualToString:@"iPad2,3"])     // iPad 2 CDMA
+      return DEVICE_CLASS_5TH_GEN; 
+    if ([machineName isEqualToString:@"iPad2,4"])     // iPad 2 New
+      return DEVICE_CLASS_5TH_GEN; 
+    if ([machineName isEqualToString:@"iPad3,1"])     // iPad 3 Wi-Fi
+      return DEVICE_CLASS_5TH_GEN;
+    if ([machineName isEqualToString:@"iPad3,2"])     // iPad 3 GSM
+      return DEVICE_CLASS_5TH_GEN;
+    if ([machineName isEqualToString:@"iPad3,3"])     // iPad 3 CDMA
+      return DEVICE_CLASS_5TH_GEN; 
+    if ([machineName isEqualToString:@"i386"])
+      return DEVICE_CLASS_SIMULATOR;
+    if ([machineName isEqualToString:@"x86_64"])
+      return DEVICE_CLASS_SIMULATOR;
+  }
+  
+  // If we don't know what it is, assume it's a next-gen (and faster) device
+  return DEVICE_CLASS_NEXT_GEN;
+}
+
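+// The "hw.machine" sysctl returns the model identifier, e.g. "iPhone3,1" for
+// iPhone 4, or "i386"/"x86_64" in the Simulator.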
++(NSString *)getMachineName {
+  char deviceNameBuf[256] = {0};
+  size_t bufSize = sizeof(deviceNameBuf);
+  
+  NSString *machineName = nil;
+  
+  if (sysctlbyname("hw.machine", deviceNameBuf, &bufSize, NULL, 0) == 0) {
+    machineName = [NSString stringWithCString:deviceNameBuf encoding:NSUTF8StringEncoding];
+  }
+  
+  return machineName;
+}
+
++(NSString *)getDeviceName {
+  NSString *machineName = [Device getMachineName];
+
+  if (machineName) {
+    NLLogNVPInfo(@"Machine name: %@", machineName);
+    
+    if ([machineName hasPrefix:@"iPhone"])
+      return kDeviceNameiPhone;
+    if ([machineName hasPrefix:@"iPad"])
+      return kDeviceNameiPad;
+    if ([machineName hasPrefix:@"iPod"])
+      return kDeviceNameiPod;
+    if ([machineName isEqualToString:@"i386"])
+      return kDeviceNameiPhone;
+    if ([machineName isEqualToString:@"x86_64"])
+      return kDeviceNameiPhone;
+  }
+  
+  return kDeviceNameiPhone;
+}
+
+@end
diff --git a/Libraries/DropcamUtils/ImageUtil.h b/Libraries/DropcamUtils/ImageUtil.h
new file mode 100644
index 0000000..e2c310d
--- /dev/null
+++ b/Libraries/DropcamUtils/ImageUtil.h
@@ -0,0 +1,20 @@
+//
+//  ImageUtil.h
+//  DropcamUtils
+//
+//  Created by Loren Kirkby on 10/1/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import <CoreGraphics/CoreGraphics.h>
+
+@interface ImageUtil : NSObject
+
++ (CGRect) calcCenterFittedRectWithBitmapWidth:(CGFloat)bitmapWidth
+                                  bitmapHeight:(CGFloat)bitmapHeight
+                                   canvasWidth:(CGFloat)canvasWidth
+                                  canvasHeight:(CGFloat)canvasHeight;
+
++ (CGRect)scaleRect:(CGRect)rect byScaleFactor:(CGFloat)scale;
+
+@end
diff --git a/Libraries/DropcamUtils/ImageUtil.m b/Libraries/DropcamUtils/ImageUtil.m
new file mode 100644
index 0000000..6702a24
--- /dev/null
+++ b/Libraries/DropcamUtils/ImageUtil.m
@@ -0,0 +1,47 @@
+//
+//  ImageUtil.m
+//  DropcamUtils
+//
+//  Created by Loren Kirkby on 10/1/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "ImageUtil.h"
+
+@implementation ImageUtil
+
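+// Worked example: fitting a 640x360 (16:9) bitmap into a 320x480 canvas.
+// bitmapAr = 1.78 > canvasAr = 0.67, so the width is pinned: dstWidth = 320,
+// dstHeight = 320 / 1.78 = 180, and the centered rect is (0, 150, 320, 180).
+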
++ (CGRect) calcCenterFittedRectWithBitmapWidth:(CGFloat)bitmapWidth
+                                  bitmapHeight:(CGFloat)bitmapHeight
+                                   canvasWidth:(CGFloat)canvasWidth
+                                  canvasHeight:(CGFloat)canvasHeight {
+  CGFloat bitmapAr = bitmapWidth / bitmapHeight;
+  CGFloat canvasAr = canvasWidth / canvasHeight;
+  
+  CGFloat dstWidth;
+  CGFloat dstHeight;
+  
+  // If source and destination have very, very similar aspect ratios, just fill
+  // the canvas. I'm looking at you, iPhone 5, with your 16:9.014 screen.
+  if(ABS(bitmapAr - canvasAr) < 0.01) {
+    dstHeight = canvasHeight;
+    dstWidth = canvasWidth;
+  }
+  else if (bitmapAr > canvasAr) {
+    dstWidth = canvasWidth;
+    dstHeight = dstWidth / bitmapAr;
+  } else {
+    dstHeight = canvasHeight;
+    dstWidth = dstHeight * bitmapAr;
+  }
+  
+  CGFloat centerX = (canvasWidth - dstWidth) / 2.0f;
+  CGFloat centerY = (canvasHeight - dstHeight) / 2.0f;
+  CGRect dst = CGRectMake(centerX, centerY, dstWidth, dstHeight);
+  
+  return dst;
+}
+
++ (CGRect)scaleRect:(CGRect)rect byScaleFactor:(CGFloat)scale {
+  return CGRectMake(rect.origin.x * scale, rect.origin.y * scale, rect.size.width * scale, rect.size.height * scale);
+}
+
+@end
+
diff --git a/Libraries/DropcamUtils/OSUtil.h b/Libraries/DropcamUtils/OSUtil.h
new file mode 100644
index 0000000..51dd7e4
--- /dev/null
+++ b/Libraries/DropcamUtils/OSUtil.h
@@ -0,0 +1,19 @@
+//
+//  OSUtil.h
+//  Dropcam
+//
+//  Created by Loren Kirkby on 6/23/10.
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+
+@interface OSUtil : NSObject {
+  
+}
+
++ (BOOL)isIPad;
++ (BOOL)isHighRes;
+
+@end
diff --git a/Libraries/DropcamUtils/OSUtil.m b/Libraries/DropcamUtils/OSUtil.m
new file mode 100644
index 0000000..5bbd1b1
--- /dev/null
+++ b/Libraries/DropcamUtils/OSUtil.m
@@ -0,0 +1,37 @@
+//
+//  OSUtil.m
+//  Dropcam
+//
+//  Created by Loren Kirkby on 6/23/10.
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import "OSUtil.h"
+
+
+@implementation OSUtil
+
++ (CGSize)getCurrentScreenSize {
+  UIScreen *mainScreen = [UIScreen mainScreen];
+  if ([mainScreen respondsToSelector:@selector(currentMode)]) {
+    return mainScreen.currentMode.size;
+  }
+  
+  // Assume old iPhoneOS
+  return CGSizeMake(320, 480);
+}
+
++ (BOOL)isHighRes {
+  CGSize screenSize = [OSUtil getCurrentScreenSize];
+  return screenSize.width > 320;
+}
+
++ (BOOL)isIPad {
+#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 30200
+  return (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad);
+#endif
+  
+  return NO;
+}
+
+@end
diff --git a/Libraries/DropcamUtils/Reachability.h b/Libraries/DropcamUtils/Reachability.h
new file mode 100644
index 0000000..73dad85
--- /dev/null
+++ b/Libraries/DropcamUtils/Reachability.h
@@ -0,0 +1,89 @@
+/*
+ 
+ File: Reachability.h
+ Abstract: Basic demonstration of how to use the SystemConfiguration Reachablity APIs.
+ 
+ Version: 2.2
+ 
+ Disclaimer: IMPORTANT:  This Apple software is supplied to you by Apple Inc.
+ ("Apple") in consideration of your agreement to the following terms, and your
+ use, installation, modification or redistribution of this Apple software
+ constitutes acceptance of these terms.  If you do not agree with these terms,
+ please do not use, install, modify or redistribute this Apple software.
+ 
+ In consideration of your agreement to abide by the following terms, and subject
+ to these terms, Apple grants you a personal, non-exclusive license, under
+ Apple's copyrights in this original Apple software (the "Apple Software"), to
+ use, reproduce, modify and redistribute the Apple Software, with or without
+ modifications, in source and/or binary forms; provided that if you redistribute
+ the Apple Software in its entirety and without modifications, you must retain
+ this notice and the following text and disclaimers in all such redistributions
+ of the Apple Software.
+ Neither the name, trademarks, service marks or logos of Apple Inc. may be used
+ to endorse or promote products derived from the Apple Software without specific
+ prior written permission from Apple.  Except as expressly stated in this notice,
+ no other rights or licenses, express or implied, are granted by Apple herein,
+ including but not limited to any patent rights that may be infringed by your
+ derivative works or by other works in which the Apple Software may be
+ incorporated.
+ 
+ The Apple Software is provided by Apple on an "AS IS" basis.  APPLE MAKES NO
+ WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED
+ WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND OPERATION ALONE OR IN
+ COMBINATION WITH YOUR PRODUCTS.
+ 
+ IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+ GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR
+ DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF
+ CONTRACT, TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF
+ APPLE HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ 
+ Copyright (C) 2010 Apple Inc. All Rights Reserved.
+ 
+*/
+
+
+#import <Foundation/Foundation.h>
+#import <SystemConfiguration/SystemConfiguration.h>
+#import <netinet/in.h>
+
+typedef enum {
+	NotReachable = 0,
+	ReachableViaWiFi,
+	ReachableViaWWAN
+} NetworkStatus;
+#define kReachabilityChangedNotification @"kNetworkReachabilityChangedNotification"
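+
+// A minimal usage sketch against this header (the reachabilityChanged: handler
+// name is illustrative, not part of the class):
+//
+//   Reachability *reach = [Reachability reachabilityForInternetConnection];
+//   [[NSNotificationCenter defaultCenter] addObserver:self
+//                                            selector:@selector(reachabilityChanged:)
+//                                                name:kReachabilityChangedNotification
+//                                              object:nil];
+//   [reach startNotifier];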
+
+@interface Reachability: NSObject
+{
+	BOOL localWiFiRef;
+	SCNetworkReachabilityRef reachabilityRef;
+}
+
+//reachabilityWithHostName- Use to check the reachability of a particular host name. 
++ (Reachability*) reachabilityWithHostName: (NSString*) hostName;
+
+//reachabilityWithAddress- Use to check the reachability of a particular IP address. 
++ (Reachability*) reachabilityWithAddress: (const struct sockaddr_in*) hostAddress;
+
+//reachabilityForInternetConnection- checks whether the default route is available.  
+//  Should be used by applications that do not connect to a particular host
++ (Reachability*) reachabilityForInternetConnection;
+
+//reachabilityForLocalWiFi- checks whether a local wifi connection is available.
++ (Reachability*) reachabilityForLocalWiFi;
+
+//Start listening for reachability notifications on the current run loop
+- (BOOL) startNotifier;
+- (void) stopNotifier;
+
+- (NetworkStatus) currentReachabilityStatus;
+//WWAN may be available, but not active until a connection has been established.
+//WiFi may require a connection for VPN on Demand.
+- (BOOL) connectionRequired;
+@end
+
+
diff --git a/NexusVideoPlayer/AAC.h b/NexusVideoPlayer/AAC.h
new file mode 100644
index 0000000..4d43846
--- /dev/null
+++ b/NexusVideoPlayer/AAC.h
@@ -0,0 +1,19 @@
+//
+//  AAC.h
+//  Dropcam
+//
+//  Created by Loren Kirkby on 1/22/10.
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+struct AACAudioSpecificConfig {
+  int objectType;
+  int frequency;
+  int channelConfig;
+  int frameLength;
+};
+
+BOOL DecodeAACAudioSpecificConfig(NSData* data, struct AACAudioSpecificConfig* asc);
+
diff --git a/NexusVideoPlayer/AAC.m b/NexusVideoPlayer/AAC.m
new file mode 100644
index 0000000..72cf118
--- /dev/null
+++ b/NexusVideoPlayer/AAC.m
@@ -0,0 +1,44 @@
+//
+//  AAC.m
+//  Dropcam
+//
+//  Created by Loren Kirkby on 1/22/10.
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import "AAC.h"
+#import "BitParser.h"
+
+// A very simple AAC AudioSpecificConfig parser - doesn't handle some of the more complex AOTs
+// Using http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio for reference
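+//
+// AudioSpecificConfig layout (simple case): audioObjectType (5 bits),
+// samplingFrequencyIndex (4 bits), channelConfiguration (4 bits), then for
+// GASpecificConfig a frameLengthFlag (1 bit): 0 -> 1024 samples, 1 -> 960.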
+
+static const int sampleFreqTable[] = {
+  96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000,
+  12000, 11025, 8000, 7350 
+};
+
+BOOL DecodeAACAudioSpecificConfig(NSData* data, struct AACAudioSpecificConfig* asc) {
+  BitParser *parser = [BitParser parserWithData:data];
+  
+  uint32_t objectType = 0;
+  [parser readBits:5 into:&objectType];
+  
+  uint32_t sampleFreqIdx = 0;
+  [parser readBits:4 into:&sampleFreqIdx];
+  
+  if (sampleFreqIdx > 12)
+    return NO;
+  
+  uint32_t channelConfig = 0;
+  [parser readBits:4 into:&channelConfig];
+  
+  uint32_t frameLength = 0;
+  [parser readBits:1 into:&frameLength];
+  
+  asc->objectType = objectType;
+  asc->frequency = sampleFreqTable[sampleFreqIdx];
+  asc->channelConfig = channelConfig;
+  asc->frameLength = (frameLength == 0) ? 1024 : 960;
+  
+  return YES;
+}
diff --git a/NexusVideoPlayer/AsyncSocket.h b/NexusVideoPlayer/AsyncSocket.h
new file mode 100644
index 0000000..33c1a7f
--- /dev/null
+++ b/NexusVideoPlayer/AsyncSocket.h
@@ -0,0 +1,659 @@
+//
+//  AsyncSocket.h
+//  
+//  This class is in the public domain.
+//  Originally created by Dustin Voss on Wed Jan 29 2003.
+//  Updated and maintained by Deusty Designs and the Mac development community.
+//
+//  http://code.google.com/p/cocoaasyncsocket/
+//
+
+#import <Foundation/Foundation.h>
+
+@class AsyncSocket;
+@class AsyncReadPacket;
+@class AsyncWritePacket;
+
+extern NSString *const AsyncSocketException;
+extern NSString *const AsyncSocketErrorDomain;
+
+enum AsyncSocketError
+{
+	AsyncSocketCFSocketError = kCFSocketError,	// From CFSocketError enum.
+	AsyncSocketNoError = 0,						// Never used.
+	AsyncSocketCanceledError,					// onSocketWillConnect: returned NO.
+	AsyncSocketConnectTimeoutError,
+	AsyncSocketReadMaxedOutError,               // Reached set maxLength without completing
+	AsyncSocketReadTimeoutError,
+	AsyncSocketWriteTimeoutError
+};
+typedef enum AsyncSocketError AsyncSocketError;
+
+@protocol AsyncSocketDelegate
+@optional
+
+/**
+ * In the event of an error, the socket is closed.
+ * You may call "unreadData" during this call-back to get the last bit of data off the socket.
+ * When connecting, this delegate method may be called
+ * before"onSocket:didAcceptNewSocket:" or "onSocket:didConnectToHost:".
+**/
+- (void)onSocket:(AsyncSocket *)sock willDisconnectWithError:(NSError *)err;
+
+/**
+ * Called when a socket disconnects with or without error.  If you want to release a socket after it disconnects,
+ * do so here. It is not safe to do that during "onSocket:willDisconnectWithError:".
+ * 
+ * If you call the disconnect method, and the socket wasn't already disconnected,
+ * this delegate method will be called before the disconnect method returns.
+**/
+- (void)onSocketDidDisconnect:(AsyncSocket *)sock;
+
+/**
+ * Called when a socket accepts a connection.  Another socket is spawned to handle it. The new socket will have
+ * the same delegate and will call "onSocket:didConnectToHost:port:".
+**/
+- (void)onSocket:(AsyncSocket *)sock didAcceptNewSocket:(AsyncSocket *)newSocket;
+
+/**
+ * Called when a new socket is spawned to handle a connection.  This method should return the run-loop of the
+ * thread on which the new socket and its delegate should operate. If omitted, [NSRunLoop currentRunLoop] is used.
+**/
+- (NSRunLoop *)onSocket:(AsyncSocket *)sock wantsRunLoopForNewSocket:(AsyncSocket *)newSocket;
+
+/**
+ * Called when a socket is about to connect. This method should return YES to continue, or NO to abort.
+ * If aborted, will result in AsyncSocketCanceledError.
+ * 
+ * If the connectToHost:onPort:error: method was called, the delegate will be able to access and configure the
+ * CFReadStream and CFWriteStream as desired prior to connection.
+ *
+ * If the connectToAddress:error: method was called, the delegate will be able to access and configure the
+ * CFSocket and CFSocketNativeHandle (BSD socket) as desired prior to connection. You will be able to access and
+ * configure the CFReadStream and CFWriteStream in the onSocket:didConnectToHost:port: method.
+**/
+- (BOOL)onSocketWillConnect:(AsyncSocket *)sock;
+
+/**
+ * Called when a socket connects and is ready for reading and writing.
+ * The host parameter will be an IP address, not a DNS name.
+**/
+- (void)onSocket:(AsyncSocket *)sock didConnectToHost:(NSString *)host port:(UInt16)port;
+
+/**
+ * Called when a socket has completed reading the requested data into memory.
+ * Not called if there is an error.
+**/
+- (void)onSocket:(AsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag;
+
+/**
+ * Called when a socket has read in data, but has not yet completed the read.
+ * This would occur if using readToData: or readToLength: methods.
+ * It may be used for things such as updating progress bars.
+**/
+- (void)onSocket:(AsyncSocket *)sock didReadPartialDataOfLength:(NSUInteger)partialLength tag:(long)tag;
+
+/**
+ * Called when a socket has completed writing the requested data. Not called if there is an error.
+**/
+- (void)onSocket:(AsyncSocket *)sock didWriteDataWithTag:(long)tag;
+
+/**
+ * Called when a socket has written some data, but has not yet completed the entire write.
+ * It may be used for things such as updating progress bars.
+**/
+- (void)onSocket:(AsyncSocket *)sock didWritePartialDataOfLength:(NSUInteger)partialLength tag:(long)tag;
+
+/**
+ * Called if a read operation has reached its timeout without completing.
+ * This method allows you to optionally extend the timeout.
+ * If you return a positive time interval (> 0) the read's timeout will be extended by the given amount.
+ * If you don't implement this method, or return a non-positive time interval (<= 0) the read will timeout as usual.
+ * 
+ * The elapsed parameter is the sum of the original timeout, plus any additions previously added via this method.
+ * The length parameter is the number of bytes that have been read so far for the read operation.
+ * 
+ * Note that this method may be called multiple times for a single read if you return positive numbers.
+**/
+- (NSTimeInterval)onSocket:(AsyncSocket *)sock
+  shouldTimeoutReadWithTag:(long)tag
+                   elapsed:(NSTimeInterval)elapsed
+                 bytesDone:(NSUInteger)length;
+
+/**
+ * Called if a write operation has reached its timeout without completing.
+ * This method allows you to optionally extend the timeout.
+ * If you return a positive time interval (> 0) the write's timeout will be extended by the given amount.
+ * If you don't implement this method, or return a non-positive time interval (<= 0) the write will timeout as usual.
+ * 
+ * The elapsed parameter is the sum of the original timeout, plus any additions previously added via this method.
+ * The length parameter is the number of bytes that have been written so far for the write operation.
+ * 
+ * Note that this method may be called multiple times for a single write if you return positive numbers.
+**/
+- (NSTimeInterval)onSocket:(AsyncSocket *)sock
+ shouldTimeoutWriteWithTag:(long)tag
+                   elapsed:(NSTimeInterval)elapsed
+                 bytesDone:(NSUInteger)length;
+
+/**
+ * Called after the socket has successfully completed SSL/TLS negotiation.
+ * This method is not called unless you use the provided startTLS method.
+ * 
+ * If a SSL/TLS negotiation fails (invalid certificate, etc) then the socket will immediately close,
+ * and the onSocket:willDisconnectWithError: delegate method will be called with the specific SSL error code.
+**/
+- (void)onSocketDidSecure:(AsyncSocket *)sock;
+
+@end
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark -
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+@interface AsyncSocket : NSObject
+{
+	CFSocketNativeHandle theNativeSocket4;
+	CFSocketNativeHandle theNativeSocket6;
+	
+	CFSocketRef theSocket4;            // IPv4 accept or connect socket
+	CFSocketRef theSocket6;            // IPv6 accept or connect socket
+	
+	CFReadStreamRef theReadStream;
+	CFWriteStreamRef theWriteStream;
+
+	CFRunLoopSourceRef theSource4;     // For theSocket4
+	CFRunLoopSourceRef theSource6;     // For theSocket6
+	CFRunLoopRef theRunLoop;
+	CFSocketContext theContext;
+	NSArray *theRunLoopModes;
+	
+	NSTimer *theConnectTimer;
+
+	NSMutableArray *theReadQueue;
+	AsyncReadPacket *theCurrentRead;
+	NSTimer *theReadTimer;
+	NSMutableData *partialReadBuffer;
+	
+	NSMutableArray *theWriteQueue;
+	AsyncWritePacket *theCurrentWrite;
+	NSTimer *theWriteTimer;
+
+	id theDelegate;
+	UInt16 theFlags;
+	
+	long theUserData;
+}
+
+- (id)init;
+- (id)initWithDelegate:(id)delegate;
+- (id)initWithDelegate:(id)delegate userData:(long)userData;
+
+/* String representation is long but has no "\n". */
+- (NSString *)description;
+
+/**
+ * Use "canSafelySetDelegate" to see if there is any pending business (reads and writes) with the current delegate
+ * before changing it.  It is, of course, safe to change the delegate before connecting or accepting connections.
+**/
+- (id)delegate;
+- (BOOL)canSafelySetDelegate;
+- (void)setDelegate:(id)delegate;
+
+/* User data can be a long, or an id or void * cast to a long. */
+- (long)userData;
+- (void)setUserData:(long)userData;
+
+/* Don't use these to read or write. And don't close them either! */
+- (CFSocketRef)getCFSocket;
+- (CFReadStreamRef)getCFReadStream;
+- (CFWriteStreamRef)getCFWriteStream;
+
+// Once one of the accept or connect methods are called, the AsyncSocket instance is locked in
+// and the other accept/connect methods can't be called without disconnecting the socket first.
+// If the attempt fails or times out, these methods either return NO or
+// call "onSocket:willDisconnectWithError:" and "onSockedDidDisconnect:".
+
+// When an incoming connection is accepted, AsyncSocket invokes several delegate methods.
+// These methods are (in chronological order):
+// 1. onSocket:didAcceptNewSocket:
+// 2. onSocket:wantsRunLoopForNewSocket:
+// 3. onSocketWillConnect:
+// 
+// Your server code will need to retain the accepted socket (if you want to accept it).
+// The best place to do this is probably in the onSocket:didAcceptNewSocket: method.
+// 
+// After the read and write streams have been setup for the newly accepted socket,
+// the onSocket:didConnectToHost:port: method will be called on the proper run loop.
+// 
+// Multithreading Note: If you're going to be moving the newly accepted socket to another run
+// loop by implementing onSocket:wantsRunLoopForNewSocket:, then you should wait until the
+// onSocket:didConnectToHost:port: method before calling read, write, or startTLS methods.
+// Otherwise read/write events are scheduled on the incorrect runloop, and chaos may ensue.
+
+/**
+ * Tells the socket to begin listening and accepting connections on the given port.
+ * When a connection comes in, the AsyncSocket instance will call the various delegate methods (see above).
+ * The socket will listen on all available interfaces (e.g. wifi, ethernet, etc)
+**/
+- (BOOL)acceptOnPort:(UInt16)port error:(NSError **)errPtr;
+
+/**
+ * This method is the same as acceptOnPort:error: with the additional option
+ * of specifying which interface to listen on. So, for example, if you were writing code for a server that
+ * has multiple IP addresses, you could specify which address you wanted to listen on.  Or you could use it
+ * to specify that the socket should only accept connections over ethernet, and not other interfaces such as wifi.
+ * You may also use the special strings "localhost" or "loopback" to specify that
+ * the socket only accept connections from the local machine.
+ * 
+ * To accept connections on any interface pass nil, or simply use the acceptOnPort:error: method.
+**/
+- (BOOL)acceptOnInterface:(NSString *)interface port:(UInt16)port error:(NSError **)errPtr;
+
+/**
+ * Connects to the given host and port.
+ * The host may be a domain name (e.g. "deusty.com") or an IP address string (e.g. "192.168.0.2")
+**/
+- (BOOL)connectToHost:(NSString *)hostname onPort:(UInt16)port error:(NSError **)errPtr;
+
+/**
+ * This method is the same as connectToHost:onPort:error: with an additional timeout option.
+ * To not time out use a negative time interval, or simply use the connectToHost:onPort:error: method.
+**/
+- (BOOL)connectToHost:(NSString *)hostname
+			   onPort:(UInt16)port
+		  withTimeout:(NSTimeInterval)timeout
+				error:(NSError **)errPtr;
+
+/**
+ * Connects to the given address, specified as a sockaddr structure wrapped in a NSData object.
+ * For example, a NSData object returned from NSNetService's addresses method.
+ * 
+ * If you have an existing struct sockaddr you can convert it to a NSData object like so:
+ * struct sockaddr sa  -> NSData *dsa = [NSData dataWithBytes:&sa length:sa.sa_len];
+ * struct sockaddr *sa -> NSData *dsa = [NSData dataWithBytes:sa length:sa->sa_len];
+**/
+- (BOOL)connectToAddress:(NSData *)remoteAddr error:(NSError **)errPtr;
+
+/**
+ * This method is the same as connectToAddress:error: with an additional timeout option.
+ * To not time out use a negative time interval, or simply use the connectToAddress:error: method.
+**/
+- (BOOL)connectToAddress:(NSData *)remoteAddr withTimeout:(NSTimeInterval)timeout error:(NSError **)errPtr;
+
+- (BOOL)connectToAddress:(NSData *)remoteAddr
+     viaInterfaceAddress:(NSData *)interfaceAddr
+             withTimeout:(NSTimeInterval)timeout
+                   error:(NSError **)errPtr;
+
+/**
+ * Disconnects immediately. Any pending reads or writes are dropped.
+ * If the socket is not already disconnected, the onSocketDidDisconnect delegate method
+ * will be called immediately, before this method returns.
+ * 
+ * Please note the recommended way of releasing an AsyncSocket instance (e.g. in a dealloc method)
+ * [asyncSocket setDelegate:nil];
+ * [asyncSocket disconnect];
+ * [asyncSocket release];
+**/
+- (void)disconnect;
+
+/**
+ * Disconnects after all pending reads have completed.
+ * After calling this, the read and write methods will do nothing.
+ * The socket will disconnect even if there are still pending writes.
+**/
+- (void)disconnectAfterReading;
+
+/**
+ * Disconnects after all pending writes have completed.
+ * After calling this, the read and write methods will do nothing.
+ * The socket will disconnect even if there are still pending reads.
+**/
+- (void)disconnectAfterWriting;
+
+/**
+ * Disconnects after all pending reads and writes have completed.
+ * After calling this, the read and write methods will do nothing.
+**/
+- (void)disconnectAfterReadingAndWriting;
+
+/* Returns YES if the socket and streams are open, connected, and ready for reading and writing. */
+- (BOOL)isConnected;
+
+/**
+ * Returns the local or remote host and port to which this socket is connected, or nil and 0 if not connected.
+ * The host will be an IP address.
+**/
+- (NSString *)connectedHost;
+- (UInt16)connectedPort;
+
+- (NSString *)localHost;
+- (UInt16)localPort;
+
+/**
+ * Returns the local or remote address to which this socket is connected,
+ * specified as a sockaddr structure wrapped in a NSData object.
+ * 
+ * See also the connectedHost, connectedPort, localHost and localPort methods.
+**/
+- (NSData *)connectedAddress;
+- (NSData *)localAddress;
+
+/**
+ * Returns whether the socket is IPv4 or IPv6.
+ * An accepting socket may be both.
+**/
+- (BOOL)isIPv4;
+- (BOOL)isIPv6;
+
+// The readData and writeData methods won't block (they are asynchronous).
+// 
+// When a read is complete the onSocket:didReadData:withTag: delegate method is called.
+// When a write is complete the onSocket:didWriteDataWithTag: delegate method is called.
+// 
+// You may optionally set a timeout for any read/write operation. (To not timeout, use a negative time interval.)
+// If a read/write operation times out, the corresponding "onSocket:shouldTimeout..." delegate method
+// is called to optionally allow you to extend the timeout.
+// Upon a timeout, the "onSocket:willDisconnectWithError:" method is called, followed by "onSocketDidDisconnect".
+// 
+// The tag is for your convenience.
+// You can use it as an array index, step number, state id, pointer, etc.
+
+/**
+ * Reads the first available bytes that become available on the socket.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+**/
+- (void)readDataWithTimeout:(NSTimeInterval)timeout tag:(long)tag;
+
+/**
+ * Reads the first available bytes that become available on the socket.
+ * The bytes will be appended to the given byte buffer starting at the given offset.
+ * The given buffer will automatically be increased in size if needed.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * If the buffer is nil, the socket will create a buffer for you.
+ * 
+ * If the bufferOffset is greater than the length of the given buffer,
+ * the method will do nothing, and the delegate will not be called.
+ * 
+ * If you pass a buffer, you must not alter it in any way while AsyncSocket is using it.
+ * After completion, the data returned in onSocket:didReadData:withTag: will be a subset of the given buffer.
+ * That is, it will reference the bytes that were appended to the given buffer.
+**/
+- (void)readDataWithTimeout:(NSTimeInterval)timeout
+					 buffer:(NSMutableData *)buffer
+			   bufferOffset:(NSUInteger)offset
+						tag:(long)tag;
+
+/**
+ * Reads the first available bytes that become available on the socket.
+ * The bytes will be appended to the given byte buffer starting at the given offset.
+ * The given buffer will automatically be increased in size if needed.
+ * A maximum of length bytes will be read.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * If the buffer is nil, a buffer will automatically be created for you.
+ * If maxLength is zero, no length restriction is enforced.
+ * 
+ * If the bufferOffset is greater than the length of the given buffer,
+ * the method will do nothing, and the delegate will not be called.
+ * 
+ * If you pass a buffer, you must not alter it in any way while AsyncSocket is using it.
+ * After completion, the data returned in onSocket:didReadData:withTag: will be a subset of the given buffer.
+ * That is, it will reference the bytes that were appended to the given buffer.
+**/
+- (void)readDataWithTimeout:(NSTimeInterval)timeout
+                     buffer:(NSMutableData *)buffer
+               bufferOffset:(NSUInteger)offset
+                  maxLength:(NSUInteger)length
+                        tag:(long)tag;
+
+/**
+ * Reads the given number of bytes.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * 
+ * If the length is 0, this method does nothing and the delegate is not called.
+**/
+- (void)readDataToLength:(NSUInteger)length withTimeout:(NSTimeInterval)timeout tag:(long)tag;
+
+/**
+ * Reads the given number of bytes.
+ * The bytes will be appended to the given byte buffer starting at the given offset.
+ * The given buffer will automatically be increased in size if needed.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * If the buffer is nil, a buffer will automatically be created for you.
+ * 
+ * If the length is 0, this method does nothing and the delegate is not called.
+ * If the bufferOffset is greater than the length of the given buffer,
+ * the method will do nothing, and the delegate will not be called.
+ * 
+ * If you pass a buffer, you must not alter it in any way while AsyncSocket is using it.
+ * After completion, the data returned in onSocket:didReadData:withTag: will be a subset of the given buffer.
+ * That is, it will reference the bytes that were appended to the given buffer.
+**/
+- (void)readDataToLength:(NSUInteger)length
+             withTimeout:(NSTimeInterval)timeout
+                  buffer:(NSMutableData *)buffer
+            bufferOffset:(NSUInteger)offset
+                     tag:(long)tag;
+
+/**
+ * Reads bytes until (and including) the passed "data" parameter, which acts as a separator.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * 
+ * If you pass nil or zero-length data as the "data" parameter,
+ * the method will do nothing, and the delegate will not be called.
+ * 
+ * To read a line from the socket, use the line separator (e.g. CRLF for HTTP, see below) as the "data" parameter.
+ * Note that this method is not character-set aware, so if a separator can occur naturally as part of the encoding for
+ * a character, the read will prematurely end.
+**/
+- (void)readDataToData:(NSData *)data withTimeout:(NSTimeInterval)timeout tag:(long)tag;
+
+/**
+ * Reads bytes until (and including) the passed "data" parameter, which acts as a separator.
+ * The bytes will be appended to the given byte buffer starting at the given offset.
+ * The given buffer will automatically be increased in size if needed.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * If the buffer is nil, a buffer will automatically be created for you.
+ * 
+ * If the bufferOffset is greater than the length of the given buffer,
+ * the method will do nothing, and the delegate will not be called.
+ * 
+ * If you pass a buffer, you must not alter it in any way while AsyncSocket is using it.
+ * After completion, the data returned in onSocket:didReadData:withTag: will be a subset of the given buffer.
+ * That is, it will reference the bytes that were appended to the given buffer.
+ * 
+ * To read a line from the socket, use the line separator (e.g. CRLF for HTTP, see below) as the "data" parameter.
+ * Note that this method is not character-set aware, so if a separator can occur naturally as part of the encoding for
+ * a character, the read will prematurely end.
+**/
+- (void)readDataToData:(NSData *)data
+           withTimeout:(NSTimeInterval)timeout
+                buffer:(NSMutableData *)buffer
+          bufferOffset:(NSUInteger)offset
+                   tag:(long)tag;
+
+/**
+ * Reads bytes until (and including) the passed "data" parameter, which acts as a separator.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * 
+ * If maxLength is zero, no length restriction is enforced.
+ * Otherwise if maxLength bytes are read without completing the read,
+ * it is treated similarly to a timeout - the socket is closed with a AsyncSocketReadMaxedOutError.
+ * The read will complete successfully if exactly maxLength bytes are read and the given data is found at the end.
+ * 
+ * If you pass nil or zero-length data as the "data" parameter,
+ * the method will do nothing, and the delegate will not be called.
+ * If you pass a maxLength parameter that is less than the length of the data parameter,
+ * the method will do nothing, and the delegate will not be called.
+ * 
+ * To read a line from the socket, use the line separator (e.g. CRLF for HTTP, see below) as the "data" parameter.
+ * Note that this method is not character-set aware, so if a separator can occur naturally as part of the encoding for
+ * a character, the read will prematurely end.
+**/
+- (void)readDataToData:(NSData *)data withTimeout:(NSTimeInterval)timeout maxLength:(NSUInteger)length tag:(long)tag;
+
+/**
+ * Reads bytes until (and including) the passed "data" parameter, which acts as a separator.
+ * The bytes will be appended to the given byte buffer starting at the given offset.
+ * The given buffer will automatically be increased in size if needed.
+ * A maximum of length bytes will be read.
+ * 
+ * If the timeout value is negative, the read operation will not use a timeout.
+ * If the buffer is nil, a buffer will automatically be created for you.
+ * 
+ * If maxLength is zero, no length restriction is enforced.
+ * Otherwise if maxLength bytes are read without completing the read,
+ * it is treated similarly to a timeout - the socket is closed with a AsyncSocketReadMaxedOutError.
+ * The read will complete successfully if exactly maxLength bytes are read and the given data is found at the end.
+ * 
+ * If you pass a maxLength parameter that is less than the length of the data parameter,
+ * the method will do nothing, and the delegate will not be called.
+ * If the bufferOffset is greater than the length of the given buffer,
+ * the method will do nothing, and the delegate will not be called.
+ * 
+ * If you pass a buffer, you must not alter it in any way while AsyncSocket is using it.
+ * After completion, the data returned in onSocket:didReadData:withTag: will be a subset of the given buffer.
+ * That is, it will reference the bytes that were appended to the given buffer.
+ * 
+ * To read a line from the socket, use the line separator (e.g. CRLF for HTTP, see below) as the "data" parameter.
+ * Note that this method is not character-set aware, so if a separator can occur naturally as part of the encoding for
+ * a character, the read will prematurely end.
+**/
+- (void)readDataToData:(NSData *)data
+           withTimeout:(NSTimeInterval)timeout
+                buffer:(NSMutableData *)buffer
+          bufferOffset:(NSUInteger)offset
+             maxLength:(NSUInteger)length
+                   tag:(long)tag;
+
+/**
+ * Writes data to the socket, and calls the delegate when finished.
+ * 
+ * If you pass in nil or zero-length data, this method does nothing and the delegate will not be called.
+ * If the timeout value is negative, the write operation will not use a timeout.
+**/
+- (void)writeData:(NSData *)data withTimeout:(NSTimeInterval)timeout tag:(long)tag;
+
+/**
+ * Returns progress of current read or write, from 0.0 to 1.0, or NaN if no read/write (use isnan() to check).
+ * "tag", "done" and "total" will be filled in if they aren't NULL.
+**/
+- (float)progressOfReadReturningTag:(long *)tag bytesDone:(NSUInteger *)done total:(NSUInteger *)total;
+- (float)progressOfWriteReturningTag:(long *)tag bytesDone:(NSUInteger *)done total:(NSUInteger *)total;
+
+/**
+ * Secures the connection using SSL/TLS.
+ * 
+ * This method may be called at any time, and the TLS handshake will occur after all pending reads and writes
+ * are finished. This allows one the option of sending a protocol dependent StartTLS message, and queuing
+ * the upgrade to TLS at the same time, without having to wait for the write to finish.
+ * Any reads or writes scheduled after this method is called will occur over the secured connection.
+ * 
+ * The possible keys and values for the TLS settings are well documented.
+ * Some possible keys are:
+ * - kCFStreamSSLLevel
+ * - kCFStreamSSLAllowsExpiredCertificates
+ * - kCFStreamSSLAllowsExpiredRoots
+ * - kCFStreamSSLAllowsAnyRoot
+ * - kCFStreamSSLValidatesCertificateChain
+ * - kCFStreamSSLPeerName
+ * - kCFStreamSSLCertificates
+ * - kCFStreamSSLIsServer
+ * 
+ * Please refer to Apple's documentation for associated values, as well as other possible keys.
+ * 
+ * If you pass in nil or an empty dictionary, the default settings will be used.
+ * 
+ * The default settings will check to make sure the remote party's certificate is signed by a
+ * trusted 3rd party certificate authority (e.g. VeriSign) and that the certificate is not expired.
+ * However it will not verify the name on the certificate unless you
+ * give it a name to verify against via the kCFStreamSSLPeerName key.
+ * The security implications of this are important to understand.
+ * Imagine you are attempting to create a secure connection to MySecureServer.com,
+ * but your socket gets directed to MaliciousServer.com because of a hacked DNS server.
+ * If you simply use the default settings, and MaliciousServer.com has a valid certificate,
+ * the default settings will not detect any problems since the certificate is valid.
+ * To properly secure your connection in this particular scenario you
+ * should set the kCFStreamSSLPeerName property to "MySecureServer.com".
+ * If you do not know the peer name of the remote host in advance (for example, you're not sure
+ * if it will be "domain.com" or "www.domain.com"), then you can use the default settings to validate the
+ * certificate, and then use the X509Certificate class to verify the issuer after the socket has been secured.
+ * The X509Certificate class is part of the CocoaAsyncSocket open source project.
+**/
+- (void)startTLS:(NSDictionary *)tlsSettings;
+
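+// A minimal sketch pinning the expected peer name, per the scenario described
+// above ("MySecureServer.com" is the illustrative host from that example):
+//
+//   NSDictionary *tls = [NSDictionary dictionaryWithObject:@"MySecureServer.com"
+//                                                   forKey:(__bridge NSString *)kCFStreamSSLPeerName];
+//   [socket startTLS:tls];
+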
+/**
+ * For handling readDataToData requests, data is necessarily read from the socket in small increments.
+ * Performance can be much improved by allowing AsyncSocket to read larger chunks at a time and
+ * store any overflow in a small internal buffer.
+ * This is termed pre-buffering, as some data may be read for you before you ask for it.
+ * If you use readDataToData a lot, enabling pre-buffering will result in better performance, especially on the iPhone.
+ * 
+ * The default pre-buffering state is controlled by the DEFAULT_PREBUFFERING definition.
+ * It is highly recommended that you leave this set to YES.
+ * 
+ * This method exists in case pre-buffering needs to be disabled by default for some unforeseen reason,
+ * in which case it allows you to easily enable pre-buffering when ready.
+**/
+- (void)enablePreBuffering;
+
+/**
+ * When you create an AsyncSocket, it is added to the runloop of the current thread.
+ * So for manually created sockets, it is easiest to simply create the socket on the thread you intend to use it.
+ * 
+ * If a new socket is accepted, the delegate method onSocket:wantsRunLoopForNewSocket: is called to
+ * allow you to place the socket on a separate thread. This works best in conjunction with a thread pool design.
+ * 
+ * If, however, you need to move the socket to a separate thread at a later time, this
+ * method may be used to accomplish the task.
+ * 
+ * This method must be called from the thread/runloop the socket is currently running on.
+ * 
+ * Note: After calling this method, all further method calls to this object should be done from the given runloop.
+ * Also, all delegate calls will be sent on the given runloop.
+**/
+- (BOOL)moveToRunLoop:(NSRunLoop *)runLoop;
+
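+// A minimal sketch (illustrative): networkRunLoop is an NSRunLoop owned by a
+// dedicated network thread. Per the note above, this call must be made from
+// the socket's current thread/runloop:
+//
+//   if (![socket moveToRunLoop:networkRunLoop]) { /* handle failure */ }
+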
+/**
+ * Allows you to configure which run loop modes the socket uses.
+ * The default set of run loop modes is NSDefaultRunLoopMode.
+ * 
+ * If you'd like your socket to continue operation during other modes, you may want to add modes such as
+ * NSModalPanelRunLoopMode or NSEventTrackingRunLoopMode. Or you may simply want to use NSRunLoopCommonModes.
+ * 
+ * Accepted sockets will automatically inherit the same run loop modes as the listening socket.
+ * 
+ * Note: NSRunLoopCommonModes is defined in 10.5. For previous versions one can use kCFRunLoopCommonModes.
+**/
+- (BOOL)setRunLoopModes:(NSArray *)runLoopModes;
+- (BOOL)addRunLoopMode:(NSString *)runLoopMode;
+- (BOOL)removeRunLoopMode:(NSString *)runLoopMode;
+
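+// For example (illustrative), to keep the socket serviced while a scroll view
+// is tracking touches, one might switch to the common modes:
+//
+//   [socket setRunLoopModes:[NSArray arrayWithObject:NSRunLoopCommonModes]];
+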
+/**
+ * Returns the current run loop modes the AsyncSocket instance is operating in.
+ * The default set of run loop modes is NSDefaultRunLoopMode.
+**/
+- (NSArray *)runLoopModes;
+
+/**
+ * In the event of an error, this method may be called during onSocket:willDisconnectWithError: to read
+ * any data that's left on the socket.
+**/
+- (NSData *)unreadData;
+
+/* A few common line separators, for use with the readDataToData:... methods. */
++ (NSData *)CRLFData;   // 0x0D0A
++ (NSData *)CRData;     // 0x0D
++ (NSData *)LFData;     // 0x0A
++ (NSData *)ZeroData;   // 0x00
+
+@end
diff --git a/NexusVideoPlayer/AsyncSocket.m b/NexusVideoPlayer/AsyncSocket.m
new file mode 100644
index 0000000..76bafe4
--- /dev/null
+++ b/NexusVideoPlayer/AsyncSocket.m
@@ -0,0 +1,4333 @@
+//
+//  AsyncSocket.m
+//  
+//  This class is in the public domain.
+//  Originally created by Dustin Voss on Wed Jan 29 2003.
+//  Updated and maintained by Deusty Designs and the Mac development community.
+//
+//  http://code.google.com/p/cocoaasyncsocket/
+//
+
+#if ! __has_feature(objc_arc)
+#warning This file must be compiled with ARC. Use -fobjc-arc flag (or convert project to ARC).
+#endif
+
+#import "AsyncSocket.h"
+#import "NexusTalkTimerTarget.h"
+#import "NLCommonLoggingNVP.h"
+#import <sys/socket.h>
+#import <netinet/in.h>
+#import <arpa/inet.h>
+#import <netdb.h>
+
+#if TARGET_OS_IPHONE
+// Note: You may need to add the CFNetwork Framework to your project
+#import <CFNetwork/CFNetwork.h>
+#endif
+
+#pragma mark Declarations
+
+#define DEFAULT_PREBUFFERING YES        // Whether pre-buffering is enabled by default
+
+#define READQUEUE_CAPACITY	5           // Initial capacity
+#define WRITEQUEUE_CAPACITY 5           // Initial capacity
+#define READALL_CHUNKSIZE	256         // Incremental increase in buffer size
+#define WRITE_CHUNKSIZE    (1024 * 4)   // Limit on size of each write pass
+
+// AsyncSocket is RunLoop based, and is thus not thread-safe.
+// You must always access your AsyncSocket instance from the thread/runloop in which the instance is running.
+// You can use methods such as performSelectorOnThread to accomplish this.
+// Failure to comply with these thread-safety rules may result in errors.
+// You can enable this option to help diagnose where you are incorrectly accessing your socket.
+#if DEBUG
+  #define DEBUG_THREAD_SAFETY 1
+#else
+  #define DEBUG_THREAD_SAFETY 0
+#endif
+// 
+// If you constantly need to access your socket from multiple threads
+// then you may consider using GCDAsyncSocket instead, which is thread-safe.
+
+NSString *const AsyncSocketException = @"AsyncSocketException";
+NSString *const AsyncSocketErrorDomain = @"AsyncSocketErrorDomain";
+
+
+enum AsyncSocketFlags
+{
+	kEnablePreBuffering      = 1 <<  0,  // If set, pre-buffering is enabled
+	kDidStartDelegate        = 1 <<  1,  // If set, disconnection results in delegate call
+	kDidCompleteOpenForRead  = 1 <<  2,  // If set, open callback has been called for read stream
+	kDidCompleteOpenForWrite = 1 <<  3,  // If set, open callback has been called for write stream
+	kStartingReadTLS         = 1 <<  4,  // If set, we're waiting for TLS negotiation to complete
+	kStartingWriteTLS        = 1 <<  5,  // If set, we're waiting for TLS negotiation to complete
+	kForbidReadsWrites       = 1 <<  6,  // If set, no new reads or writes are allowed
+	kDisconnectAfterReads    = 1 <<  7,  // If set, disconnect after no more reads are queued
+	kDisconnectAfterWrites   = 1 <<  8,  // If set, disconnect after no more writes are queued
+	kClosingWithError        = 1 <<  9,  // If set, the socket is being closed due to an error
+	kDequeueReadScheduled    = 1 << 10,  // If set, a maybeDequeueRead operation is already scheduled
+	kDequeueWriteScheduled   = 1 << 11,  // If set, a maybeDequeueWrite operation is already scheduled
+	kSocketCanAcceptBytes    = 1 << 12,  // If set, we know socket can accept bytes. If unset, it's unknown.
+	kSocketHasBytesAvailable = 1 << 13,  // If set, we know socket has bytes available. If unset, it's unknown.
+};
+
+@interface AsyncSocket (Private) <NexusTalkTimerTargetProtocol>
+
+// Connecting
+- (void)startConnectTimeout:(NSTimeInterval)timeout;
+- (void)endConnectTimeout;
+- (void)doConnectTimeout:(NSTimer *)timer;
+
+// Socket Implementation
+- (CFSocketRef)newAcceptSocketForAddress:(NSData *)addr error:(NSError **)errPtr;
+- (BOOL)createSocketForAddress:(NSData *)remoteAddr error:(NSError **)errPtr;
+- (BOOL)bindSocketToAddress:(NSData *)interfaceAddr error:(NSError **)errPtr;
+- (BOOL)attachSocketsToRunLoop:(NSRunLoop *)runLoop error:(NSError **)errPtr;
+- (BOOL)configureSocketAndReturnError:(NSError **)errPtr;
+- (BOOL)connectSocketToAddress:(NSData *)remoteAddr error:(NSError **)errPtr;
+- (void)doAcceptWithSocket:(CFSocketNativeHandle)newSocket;
+- (void)doSocketOpen:(CFSocketRef)sock withCFSocketError:(CFSocketError)err;
+
+// Stream Implementation
+- (BOOL)createStreamsFromNative:(CFSocketNativeHandle)native error:(NSError **)errPtr;
+- (BOOL)createStreamsToHost:(NSString *)hostname onPort:(UInt16)port error:(NSError **)errPtr;
+- (BOOL)attachStreamsToRunLoop:(NSRunLoop *)runLoop error:(NSError **)errPtr;
+- (BOOL)configureStreamsAndReturnError:(NSError **)errPtr;
+- (BOOL)openStreamsAndReturnError:(NSError **)errPtr;
+- (void)doStreamOpen;
+- (BOOL)setSocketFromStreamsAndReturnError:(NSError **)errPtr;
+
+// Disconnect Implementation
+- (void)closeWithError:(NSError *)err;
+- (void)recoverUnreadData;
+- (void)emptyQueues;
+- (void)close;
+
+// Errors
+- (NSError *)getErrnoError;
+- (NSError *)getAbortError;
+- (NSError *)getStreamError;
+- (NSError *)getSocketError;
+- (NSError *)getConnectTimeoutError;
+- (NSError *)getReadMaxedOutError;
+- (NSError *)getReadTimeoutError;
+- (NSError *)getWriteTimeoutError;
+- (NSError *)errorFromCFStreamError:(CFStreamError)err;
+
+// Diagnostics
+- (BOOL)isDisconnected;
+- (BOOL)areStreamsConnected;
+- (NSString *)connectedHostFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket;
+- (NSString *)connectedHostFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket;
+- (NSString *)connectedHostFromCFSocket4:(CFSocketRef)socket;
+- (NSString *)connectedHostFromCFSocket6:(CFSocketRef)socket;
+- (UInt16)connectedPortFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket;
+- (UInt16)connectedPortFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket;
+- (UInt16)connectedPortFromCFSocket4:(CFSocketRef)socket;
+- (UInt16)connectedPortFromCFSocket6:(CFSocketRef)socket;
+- (NSString *)localHostFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket;
+- (NSString *)localHostFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket;
+- (NSString *)localHostFromCFSocket4:(CFSocketRef)socket;
+- (NSString *)localHostFromCFSocket6:(CFSocketRef)socket;
+- (UInt16)localPortFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket;
+- (UInt16)localPortFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket;
+- (UInt16)localPortFromCFSocket4:(CFSocketRef)socket;
+- (UInt16)localPortFromCFSocket6:(CFSocketRef)socket;
+- (NSString *)hostFromAddress4:(struct sockaddr_in *)pSockaddr4;
+- (NSString *)hostFromAddress6:(struct sockaddr_in6 *)pSockaddr6;
+- (UInt16)portFromAddress4:(struct sockaddr_in *)pSockaddr4;
+- (UInt16)portFromAddress6:(struct sockaddr_in6 *)pSockaddr6;
+
+// Reading
+- (void)doBytesAvailable;
+- (void)completeCurrentRead;
+- (void)endCurrentRead;
+- (void)scheduleDequeueRead;
+- (void)maybeDequeueRead;
+- (void)doReadTimeout:(NSTimer *)timer;
+
+// Writing
+- (void)doSendBytes;
+- (void)completeCurrentWrite;
+- (void)endCurrentWrite;
+- (void)scheduleDequeueWrite;
+- (void)maybeDequeueWrite;
+- (void)maybeScheduleDisconnect;
+- (void)doWriteTimeout:(NSTimer *)timer;
+
+// Run Loop
+- (void)runLoopAddSource:(CFRunLoopSourceRef)source;
+- (void)runLoopRemoveSource:(CFRunLoopSourceRef)source;
+- (void)runLoopAddTimer:(NSTimer *)timer;
+- (void)runLoopRemoveTimer:(NSTimer *)timer;
+- (void)runLoopUnscheduleReadStream;
+- (void)runLoopUnscheduleWriteStream;
+
+// Security
+- (void)maybeStartTLS;
+- (void)onTLSHandshakeSuccessful;
+
+// Callbacks
+- (void)doCFCallback:(CFSocketCallBackType)type
+           forSocket:(CFSocketRef)sock withAddress:(NSData *)address withData:(const void *)pData;
+- (void)doCFReadStreamCallback:(CFStreamEventType)type forStream:(CFReadStreamRef)stream;
+- (void)doCFWriteStreamCallback:(CFStreamEventType)type forStream:(CFWriteStreamRef)stream;
+
+@end
+
+static void MyCFSocketCallback(CFSocketRef, CFSocketCallBackType, CFDataRef, const void *, void *);
+static void MyCFReadStreamCallback(CFReadStreamRef stream, CFStreamEventType type, void *pInfo);
+static void MyCFWriteStreamCallback(CFWriteStreamRef stream, CFStreamEventType type, void *pInfo);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark -
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * The AsyncReadPacket encompasses the instructions for any given read.
+ * The content of a read packet allows the code to determine if we're:
+ *  - reading to a certain length
+ *  - reading to a certain separator
+ *  - or simply reading the first chunk of available data
+**/
+@interface AsyncReadPacket : NSObject
+{
+  @public
+	NSMutableData *buffer;
+	NSUInteger startOffset;
+	NSUInteger bytesDone;
+	NSUInteger maxLength;
+	NSTimeInterval timeout;
+	NSUInteger readLength;
+	NSData *term;
+	BOOL bufferOwner;
+	NSUInteger originalBufferLength;
+	long tag;
+}
+- (id)initWithData:(NSMutableData *)d
+       startOffset:(NSUInteger)s
+         maxLength:(NSUInteger)m
+           timeout:(NSTimeInterval)t
+        readLength:(NSUInteger)l
+        terminator:(NSData *)e
+               tag:(long)i;
+
+- (NSUInteger)readLengthForNonTerm;
+- (NSUInteger)readLengthForTerm;
+- (NSUInteger)readLengthForTermWithPreBuffer:(NSData *)preBuffer found:(BOOL *)foundPtr;
+
+- (NSUInteger)prebufferReadLengthForTerm;
+- (NSInteger)searchForTermAfterPreBuffering:(NSUInteger)numBytes;
+@end
+
+@implementation AsyncReadPacket
+
+- (id)initWithData:(NSMutableData *)d
+       startOffset:(NSUInteger)s
+         maxLength:(NSUInteger)m
+           timeout:(NSTimeInterval)t
+        readLength:(NSUInteger)l
+        terminator:(NSData *)e
+               tag:(long)i
+{
+	if((self = [super init]))
+	{
+		if (d)
+		{
+			buffer = d;
+			startOffset = s;
+			bufferOwner = NO;
+			originalBufferLength = [d length];
+		}
+		else
+		{
+			// Note: we must use the parameter 'l' here, not the 'readLength' ivar,
+			// which hasn't been assigned yet at this point in init.
+			if (l > 0)
+				buffer = [[NSMutableData alloc] initWithLength:l];
+			else
+				buffer = [[NSMutableData alloc] initWithLength:0];
+			
+			startOffset = 0;
+			bufferOwner = YES;
+			originalBufferLength = 0;
+		}
+		
+		bytesDone = 0;
+		maxLength = m;
+		timeout = t;
+		readLength = l;
+		term = [e copy];
+		tag = i;
+	}
+	return self;
+}
+
+/**
+ * For read packets without a set terminator, returns the safe length of data that can be read
+ * without exceeding the maxLength, or forcing a resize of the buffer if at all possible.
+**/
+- (NSUInteger)readLengthForNonTerm
+{
+	NSAssert(term == nil, @"This method does not apply to term reads");
+	
+	if (readLength > 0)
+	{
+		// Read a specific length of data
+		
+		return readLength - bytesDone;
+		
+		// No need to avoid resizing the buffer.
+		// It should be resized if the buffer space is less than the requested read length.
+	}
+	else
+	{
+		// Read all available data
+		
+		NSUInteger result = READALL_CHUNKSIZE;
+		
+		if (maxLength > 0)
+		{
+			result = MIN(result, (maxLength - bytesDone));
+		}
+		
+		if (!bufferOwner)
+		{
+			// We did NOT create the buffer.
+			// It is owned by the caller.
+			// Avoid resizing the buffer if at all possible.
+			
+			if ([buffer length] == originalBufferLength)
+			{
+				NSUInteger buffSize = [buffer length];
+				NSUInteger buffSpace = buffSize - startOffset - bytesDone;
+				
+				if (buffSpace > 0)
+				{
+					result = MIN(result, buffSpace);
+				}
+			}
+		}
+		
+		return result;
+	}
+}
+
+/**
+ * For read packets with a set terminator, returns the safe length of data that can be read
+ * without going over a terminator, or the maxLength, or forcing a resize of the buffer if at all possible.
+ * 
+ * It is assumed the terminator has not already been read.
+**/
+- (NSUInteger)readLengthForTerm
+{
+	NSAssert(term != nil, @"This method does not apply to non-term reads");
+	
+	// What we're going to do is look for a partial sequence of the terminator at the end of the buffer.
+	// If a partial sequence occurs, then we must assume the next bytes to arrive will be the rest of the term,
+	// and we can only read that amount.
+	// Otherwise, we're safe to read the entire length of the term.
+	
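+	// Worked example (assuming a CRLF terminator, so the term length is 2):
+	// if the buffer currently ends with '\r', that matches the first byte of
+	// the term, so we may only read 1 more byte (the expected '\n').
+	// If no partial match exists at the end, we can safely read a full 2 bytes.
+	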
+	NSUInteger termLength = [term length];
+	
+	// Shortcuts
+	if (bytesDone == 0) return termLength;
+	if (termLength == 1) return termLength;
+	
+	// i = index within buffer at which to check data
+	// j = length of term to check against
+	
+	NSUInteger i, j;
+	if (bytesDone >= termLength)
+	{
+		i = bytesDone - termLength + 1;
+		j = termLength - 1;
+	}
+	else
+	{
+		i = 0;
+		j = bytesDone;
+	}
+	
+	NSUInteger result = termLength;
+	
+	void *buf = [buffer mutableBytes];
+	const void *termBuf = [term bytes];
+	
+	while (i < bytesDone)
+	{
+		void *subbuf = buf + startOffset + i;
+		
+		if (memcmp(subbuf, termBuf, j) == 0)
+		{
+			result = termLength - j;
+			break;
+		}
+		
+		i++;
+		j--;
+	}
+	
+	if (maxLength > 0)
+	{
+		result = MIN(result, (maxLength - bytesDone));
+	}
+	
+	if (!bufferOwner)
+	{
+		// We did NOT create the buffer.
+		// It is owned by the caller.
+		// Avoid resizing the buffer if at all possible.
+		
+		if ([buffer length] == originalBufferLength)
+		{
+			NSUInteger buffSize = [buffer length];
+			NSUInteger buffSpace = buffSize - startOffset - bytesDone;
+			
+			if (buffSpace > 0)
+			{
+				result = MIN(result, buffSpace);
+			}
+		}
+	}
+	
+	return result;
+}
+
+/**
+ * For read packets with a set terminator,
+ * returns the safe length of data that can be read from the given preBuffer,
+ * without going over a terminator or the maxLength.
+ * 
+ * It is assumed the terminator has not already been read.
+**/
+- (NSUInteger)readLengthForTermWithPreBuffer:(NSData *)preBuffer found:(BOOL *)foundPtr
+{
+	NSAssert(term != nil, @"This method does not apply to non-term reads");
+	NSAssert([preBuffer length] > 0, @"Invoked with empty pre buffer!");
+	
+	// We know that the terminator, as a whole, doesn't exist in our own buffer.
+	// But it is possible that a portion of it exists in our buffer.
+	// So we're going to look for the terminator starting with a portion of our own buffer.
+	// 
+	// Example:
+	// 
+	// term length      = 3 bytes
+	// bytesDone        = 5 bytes
+	// preBuffer length = 5 bytes
+	// 
+	// If we append the preBuffer to our buffer,
+	// it would look like this:
+	// 
+	// ---------------------
+	// |B|B|B|B|B|P|P|P|P|P|
+	// ---------------------
+	// 
+	// So we start our search here:
+	// 
+	// ---------------------
+	// |B|B|B|B|B|P|P|P|P|P|
+	// -------^-^-^---------
+	// 
+	// And move forwards...
+	// 
+	// ---------------------
+	// |B|B|B|B|B|P|P|P|P|P|
+	// ---------^-^-^-------
+	// 
+	// Until we find the terminator or reach the end.
+	// 
+	// ---------------------
+	// |B|B|B|B|B|P|P|P|P|P|
+	// ---------------^-^-^-
+	
+	BOOL found = NO;
+	
+	NSUInteger termLength = [term length];
+	NSUInteger preBufferLength = [preBuffer length];
+	
+	if ((bytesDone + preBufferLength) < termLength)
+	{
+		// Not enough data for a full term sequence yet
+		return preBufferLength;
+	}
+	
+	NSUInteger maxPreBufferLength;
+	if (maxLength > 0) {
+		maxPreBufferLength = MIN(preBufferLength, (maxLength - bytesDone));
+		
+		// Note: maxLength >= termLength
+	}
+	else {
+		maxPreBufferLength = preBufferLength;
+	}
+	
+	Byte seq[termLength];
+	const void *termBuf = [term bytes];
+	
+	NSUInteger bufLen = MIN(bytesDone, (termLength - 1));
+	void *buf = [buffer mutableBytes] + startOffset + bytesDone - bufLen;
+	
+	NSUInteger preLen = termLength - bufLen;
+	void *pre = (void *)[preBuffer bytes];
+	
+	NSUInteger loopCount = bufLen + maxPreBufferLength - termLength + 1; // Plus one. See example above.
+	
+	NSUInteger result = preBufferLength;
+	
+	NSUInteger i;
+	for (i = 0; i < loopCount; i++)
+	{
+		if (bufLen > 0)
+		{
+			// Combining bytes from buffer and preBuffer
+			
+			memcpy(seq, buf, bufLen);
+			memcpy(seq + bufLen, pre, preLen);
+			
+			if (memcmp(seq, termBuf, termLength) == 0)
+			{
+				result = preLen;
+				found = YES;
+				break;
+			}
+			
+			buf++;
+			bufLen--;
+			preLen++;
+		}
+		else
+		{
+			// Comparing directly from preBuffer
+			
+			if (memcmp(pre, termBuf, termLength) == 0)
+			{
+				NSUInteger preOffset = pre - [preBuffer bytes]; // pointer arithmetic
+				
+				result = preOffset + termLength;
+				found = YES;
+				break;
+			}
+			
+			pre++;
+		}
+	}
+	
+	// There is no need to avoid resizing the buffer in this particular situation.
+	
+	if (foundPtr) *foundPtr = found;
+	return result;
+}
+
+/**
+ * Assuming pre-buffering is enabled, returns the amount of data that can be read
+ * without going over the maxLength.
+**/
+- (NSUInteger)prebufferReadLengthForTerm
+{
+	NSAssert(term != nil, @"This method does not apply to non-term reads");
+	
+	NSUInteger result = READALL_CHUNKSIZE;
+	
+	if (maxLength > 0)
+	{
+		result = MIN(result, (maxLength - bytesDone));
+	}
+	
+	if (!bufferOwner)
+	{
+		// We did NOT create the buffer.
+		// It is owned by the caller.
+		// Avoid resizing the buffer if at all possible.
+		
+		if ([buffer length] == originalBufferLength)
+		{
+			NSUInteger buffSize = [buffer length];
+			NSUInteger buffSpace = buffSize - startOffset - bytesDone;
+			
+			if (buffSpace > 0)
+			{
+				result = MIN(result, buffSpace);
+			}
+		}
+	}
+	
+	return result;
+}
+
+/**
+ * For read packets with a set terminator, scans the packet buffer for the term.
+ * It is assumed the terminator had not been fully read prior to the new bytes.
+ * 
+ * If the term is found, the number of excess bytes after the term are returned.
+ * If the term is not found, this method will return -1.
+ * 
+ * Note: A return value of zero means the term was found at the very end.
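+ * 
+ * Worked example (assuming a CRLF terminator): if the buffer holds "ABC\r\nXY"
+ * once the new bytes are appended, the term is found with 2 excess bytes ("XY"),
+ * so 2 is returned.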
+**/
+- (NSInteger)searchForTermAfterPreBuffering:(NSUInteger)numBytes
+{
+	NSAssert(term != nil, @"This method does not apply to non-term reads");
+	NSAssert(bytesDone >= numBytes, @"Invoked with invalid numBytes!");
+	
+	// We try to start the search such that the first new byte read matches up with the last byte of the term.
+	// We continue searching forward after this until the term no longer fits into the buffer.
+	
+	NSUInteger termLength = [term length];
+	const void *termBuffer = [term bytes];
+	
+	// Remember: This method is called after the bytesDone variable has been updated.
+	
+	NSUInteger prevBytesDone = bytesDone - numBytes;
+	
+	NSUInteger i;
+	if (prevBytesDone >= termLength)
+		i = prevBytesDone - termLength + 1;
+	else
+		i = 0;
+	
+	while ((i + termLength) <= bytesDone)
+	{
+		void *subBuffer = [buffer mutableBytes] + startOffset + i;
+		
+		if(memcmp(subBuffer, termBuffer, termLength) == 0)
+		{
+			return bytesDone - (i + termLength);
+		}
+		
+		i++;
+	}
+	
+	return -1;
+}
+
+
+@end
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark -
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * The AsyncWritePacket encompasses the instructions for any given write.
+**/
+@interface AsyncWritePacket : NSObject
+{
+  @public
+	NSData *buffer;
+	NSUInteger bytesDone;
+	long tag;
+	NSTimeInterval timeout;
+}
+- (id)initWithData:(NSData *)d timeout:(NSTimeInterval)t tag:(long)i;
+@end
+
+@implementation AsyncWritePacket
+
+- (id)initWithData:(NSData *)d timeout:(NSTimeInterval)t tag:(long)i
+{
+	if((self = [super init]))
+	{
+		buffer = d;
+		timeout = t;
+		tag = i;
+		bytesDone = 0;
+	}
+	return self;
+}
+
+
+@end
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark -
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * The AsyncSpecialPacket encompasses special instructions for interruptions in the read/write queues.
+ * This class may be altered to support more than just TLS in the future.
+**/
+@interface AsyncSpecialPacket : NSObject
+{
+  @public
+	NSDictionary *tlsSettings;
+}
+- (id)initWithTLSSettings:(NSDictionary *)settings;
+@end
+
+@implementation AsyncSpecialPacket
+
+- (id)initWithTLSSettings:(NSDictionary *)settings
+{
+	if((self = [super init]))
+	{
+		tlsSettings = [settings copy];
+	}
+	return self;
+}
+
+
+@end
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark -
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+@implementation AsyncSocket
+
+- (id)init
+{
+	return [self initWithDelegate:nil userData:0];
+}
+
+- (id)initWithDelegate:(id)delegate
+{
+	return [self initWithDelegate:delegate userData:0];
+}
+
+// Designated initializer.
+- (id)initWithDelegate:(id)delegate userData:(long)userData
+{
+	if((self = [super init]))
+	{
+		theFlags = DEFAULT_PREBUFFERING ? kEnablePreBuffering : 0;
+		theDelegate = delegate;
+		theUserData = userData;
+		
+		theNativeSocket4 = 0;
+		theNativeSocket6 = 0;
+		
+		theSocket4 = NULL;
+		theSource4 = NULL;
+		
+		theSocket6 = NULL;
+		theSource6 = NULL;
+		
+		theRunLoop = NULL;
+		theReadStream = NULL;
+		theWriteStream = NULL;
+		
+		theConnectTimer = nil;
+		
+		theReadQueue = [[NSMutableArray alloc] initWithCapacity:READQUEUE_CAPACITY];
+		theCurrentRead = nil;
+		theReadTimer = nil;
+		
+		partialReadBuffer = [[NSMutableData alloc] initWithCapacity:READALL_CHUNKSIZE];
+		
+		theWriteQueue = [[NSMutableArray alloc] initWithCapacity:WRITEQUEUE_CAPACITY];
+		theCurrentWrite = nil;
+		theWriteTimer = nil;
+		
+		// Socket context
+		NSAssert(sizeof(CFSocketContext) == sizeof(CFStreamClientContext), @"CFSocketContext != CFStreamClientContext");
+		theContext.version = 0;
+		theContext.info = (__bridge void *)(self);
+		theContext.retain = nil;
+		theContext.release = nil;
+		theContext.copyDescription = nil;
+		
+		theRunLoopModes = [NSArray arrayWithObject:NSDefaultRunLoopMode];
+	}
+	return self;
+}
+
+// The socket may have been initialized in a connected state and autoreleased, so this should close it down cleanly.
+- (void)dealloc
+{
+	[self close];
+	[NSObject cancelPreviousPerformRequestsWithTarget:self];
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Thread-Safety
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (void)checkForThreadSafety
+{
+	if (theRunLoop && (theRunLoop != CFRunLoopGetCurrent()))
+	{
+		// AsyncSocket is RunLoop based.
+		// It is designed to be run and accessed from a particular thread/runloop.
+		// As such, it is faster as it does not have the overhead of locks/synchronization.
+		// 
+		// However, this places a minimal requirement on the developer to maintain thread-safety.
+		// If you are seeing errors or crashes in AsyncSocket,
+		// it is very likely that thread-safety has been broken.
+		// This method may be enabled via the DEBUG_THREAD_SAFETY macro,
+		// and will allow you to discover the place in your code where thread-safety is being broken.
+		// 
+		// Note:
+		// 
+		// If you find you constantly need to access your socket from various threads,
+		// you may prefer to use GCDAsyncSocket which is thread-safe.
+		
+		[NSException raise:AsyncSocketException
+		            format:@"Attempting to access AsyncSocket instance from incorrect thread."];
+	}
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Accessors
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (long)userData
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return theUserData;
+}
+
+- (void)setUserData:(long)userData
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	theUserData = userData;
+}
+
+- (id)delegate
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return theDelegate;
+}
+
+- (void)setDelegate:(id)delegate
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	theDelegate = delegate;
+}
+
+- (BOOL)canSafelySetDelegate
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return ([theReadQueue count] == 0 && [theWriteQueue count] == 0 && theCurrentRead == nil && theCurrentWrite == nil);
+}
+
+- (CFSocketRef)getCFSocket
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if(theSocket4)
+		return theSocket4;
+	else
+		return theSocket6;
+}
+
+- (CFReadStreamRef)getCFReadStream
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return theReadStream;
+}
+
+- (CFWriteStreamRef)getCFWriteStream
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return theWriteStream;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Progress
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (float)progressOfReadReturningTag:(long *)tag bytesDone:(NSUInteger *)done total:(NSUInteger *)total
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	// Check to make sure we're actually reading something right now,
+	// and that the read packet isn't an AsyncSpecialPacket (upgrade to TLS).
+	if (!theCurrentRead || ![theCurrentRead isKindOfClass:[AsyncReadPacket class]])
+	{
+		if (tag != NULL)   *tag = 0;
+		if (done != NULL)  *done = 0;
+		if (total != NULL) *total = 0;
+		
+		return NAN;
+	}
+	
+	// It's only possible to know the progress of our read if we're reading to a certain length.
+	// If we're reading to data, we of course have no idea when the data will arrive.
+	// If we're reading to timeout, then we have no idea when the next chunk of data will arrive.
+	
+	NSUInteger d = theCurrentRead->bytesDone;
+	NSUInteger t = theCurrentRead->readLength;
+	
+	if (tag != NULL)   *tag = theCurrentRead->tag;
+	if (done != NULL)  *done = d;
+	if (total != NULL) *total = t;
+	
+	if (t > 0)
+		return (float)d / (float)t;
+	else
+		return 1.0F;
+}
+
+- (float)progressOfWriteReturningTag:(long *)tag bytesDone:(NSUInteger *)done total:(NSUInteger *)total
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	// Check to make sure we're actually writing something right now,
+	// and that the write packet isn't an AsyncSpecialPacket (upgrade to TLS).
+	if (!theCurrentWrite || ![theCurrentWrite isKindOfClass:[AsyncWritePacket class]])
+	{
+		if (tag != NULL)   *tag = 0;
+		if (done != NULL)  *done = 0;
+		if (total != NULL) *total = 0;
+		
+		return NAN;
+	}
+	
+	NSUInteger d = theCurrentWrite->bytesDone;
+	NSUInteger t = [theCurrentWrite->buffer length];
+	
+	if (tag != NULL)   *tag = theCurrentWrite->tag;
+	if (done != NULL)  *done = d;
+	if (total != NULL) *total = t;
+	
+	return (float)d / (float)t;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Run Loop
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (void)runLoopAddSource:(CFRunLoopSourceRef)source
+{
+	for (NSString *runLoopMode in theRunLoopModes)
+	{
+		CFRunLoopAddSource(theRunLoop, source, (__bridge CFStringRef)runLoopMode);
+	}
+}
+
+- (void)runLoopRemoveSource:(CFRunLoopSourceRef)source
+{
+	for (NSString *runLoopMode in theRunLoopModes)
+	{
+		CFRunLoopRemoveSource(theRunLoop, source, (__bridge CFStringRef)runLoopMode);
+	}
+}
+
+- (void)runLoopAddSource:(CFRunLoopSourceRef)source mode:(NSString *)runLoopMode
+{
+	CFRunLoopAddSource(theRunLoop, source, (__bridge CFStringRef)runLoopMode);
+}
+
+- (void)runLoopRemoveSource:(CFRunLoopSourceRef)source mode:(NSString *)runLoopMode
+{
+	CFRunLoopRemoveSource(theRunLoop, source, (__bridge CFStringRef)runLoopMode);
+}
+
+- (void)runLoopAddTimer:(NSTimer *)timer
+{
+	for (NSString *runLoopMode in theRunLoopModes)
+	{
+		CFRunLoopAddTimer(theRunLoop, (__bridge CFRunLoopTimerRef)timer, (__bridge CFStringRef)runLoopMode);
+	}
+}
+
+- (void)runLoopRemoveTimer:(NSTimer *)timer
+{
+	for (NSString *runLoopMode in theRunLoopModes)		
+	{
+		CFRunLoopRemoveTimer(theRunLoop, (__bridge CFRunLoopTimerRef)timer, (__bridge CFStringRef)runLoopMode);
+	}
+}
+
+- (void)runLoopAddTimer:(NSTimer *)timer mode:(NSString *)runLoopMode
+{
+	CFRunLoopAddTimer(theRunLoop, (__bridge CFRunLoopTimerRef)timer, (__bridge CFStringRef)runLoopMode);
+}
+
+- (void)runLoopRemoveTimer:(NSTimer *)timer mode:(NSString *)runLoopMode
+{
+	CFRunLoopRemoveTimer(theRunLoop, (__bridge CFRunLoopTimerRef)timer, (__bridge CFStringRef)runLoopMode);
+}
+
+- (void)runLoopUnscheduleReadStream
+{
+	for (NSString *runLoopMode in theRunLoopModes)
+	{
+		CFReadStreamUnscheduleFromRunLoop(theReadStream, theRunLoop, (__bridge CFStringRef)runLoopMode);
+	}
+	CFReadStreamSetClient(theReadStream, kCFStreamEventNone, NULL, NULL);
+}
+
+- (void)runLoopUnscheduleWriteStream
+{
+	for (NSString *runLoopMode in theRunLoopModes)
+	{
+		CFWriteStreamUnscheduleFromRunLoop(theWriteStream, theRunLoop, (__bridge CFStringRef)runLoopMode);
+	}
+	CFWriteStreamSetClient(theWriteStream, kCFStreamEventNone, NULL, NULL);
+}
+
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Configuration
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * See the header file for a full explanation of pre-buffering.
+**/
+- (void)enablePreBuffering
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	theFlags |= kEnablePreBuffering;
+}
+
+/**
+ * See the header file for a full explanation of this method.
+**/
+- (BOOL)moveToRunLoop:(NSRunLoop *)runLoop
+{
+	NSAssert((theRunLoop == NULL) || (theRunLoop == CFRunLoopGetCurrent()),
+			 @"moveToRunLoop must be called from within the current RunLoop!");
+	
+	if(runLoop == nil)
+	{
+		return NO;
+	}
+	if(theRunLoop == [runLoop getCFRunLoop])
+	{
+		return YES;
+	}
+	
+	[NSObject cancelPreviousPerformRequestsWithTarget:self];
+	theFlags &= ~kDequeueReadScheduled;
+	theFlags &= ~kDequeueWriteScheduled;
+	
+	if(theReadStream && theWriteStream)
+    {
+        [self runLoopUnscheduleReadStream];
+        [self runLoopUnscheduleWriteStream];
+    }
+    
+	if(theSource4) [self runLoopRemoveSource:theSource4];
+	if(theSource6) [self runLoopRemoveSource:theSource6];
+	
+	if(theReadTimer) [self runLoopRemoveTimer:theReadTimer];
+	if(theWriteTimer) [self runLoopRemoveTimer:theWriteTimer];
+	
+	theRunLoop = [runLoop getCFRunLoop];
+	
+	if(theReadTimer) [self runLoopAddTimer:theReadTimer];
+	if(theWriteTimer) [self runLoopAddTimer:theWriteTimer];
+	
+	if(theSource4) [self runLoopAddSource:theSource4];
+	if(theSource6) [self runLoopAddSource:theSource6];
+    
+    if(theReadStream && theWriteStream)
+	{
+		if(![self attachStreamsToRunLoop:runLoop error:nil])
+		{
+			return NO;
+		}
+	}
+	
+	[runLoop performSelector:@selector(maybeDequeueRead) target:self argument:nil order:0 modes:theRunLoopModes];
+	[runLoop performSelector:@selector(maybeDequeueWrite) target:self argument:nil order:0 modes:theRunLoopModes];
+	[runLoop performSelector:@selector(maybeScheduleDisconnect) target:self argument:nil order:0 modes:theRunLoopModes];
+	
+	return YES;
+}
+
+/**
+ * See the header file for a full explanation of this method.
+**/
+- (BOOL)setRunLoopModes:(NSArray *)runLoopModes
+{
+	NSAssert((theRunLoop == NULL) || (theRunLoop == CFRunLoopGetCurrent()),
+			 @"setRunLoopModes must be called from within the current RunLoop!");
+	
+	if([runLoopModes count] == 0)
+	{
+		return NO;
+	}
+	if([theRunLoopModes isEqualToArray:runLoopModes])
+	{
+		return YES;
+	}
+	
+	[NSObject cancelPreviousPerformRequestsWithTarget:self];
+	theFlags &= ~kDequeueReadScheduled;
+	theFlags &= ~kDequeueWriteScheduled;
+	
+	if(theReadStream && theWriteStream)
+    {
+        [self runLoopUnscheduleReadStream];
+        [self runLoopUnscheduleWriteStream];
+    }
+    
+	if(theSource4) [self runLoopRemoveSource:theSource4];
+	if(theSource6) [self runLoopRemoveSource:theSource6];
+	
+	if(theReadTimer) [self runLoopRemoveTimer:theReadTimer];
+	if(theWriteTimer) [self runLoopRemoveTimer:theWriteTimer];
+	
+	theRunLoopModes = [runLoopModes copy];
+	
+	if(theReadTimer) [self runLoopAddTimer:theReadTimer];
+	if(theWriteTimer) [self runLoopAddTimer:theWriteTimer];
+	
+	if(theSource4) [self runLoopAddSource:theSource4];
+	if(theSource6) [self runLoopAddSource:theSource6];
+    
+	if(theReadStream && theWriteStream)
+	{
+		// Note: theRunLoop variable is a CFRunLoop, and NSRunLoop is NOT toll-free bridged with CFRunLoop.
+		// So we cannot pass theRunLoop to the method below, which is expecting a NSRunLoop parameter.
+		// Instead we pass nil, which will result in the method properly using the current run loop.
+		
+		if(![self attachStreamsToRunLoop:nil error:nil])
+		{
+			return NO;
+		}
+	}
+	
+	[self performSelector:@selector(maybeDequeueRead) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	[self performSelector:@selector(maybeDequeueWrite) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	[self performSelector:@selector(maybeScheduleDisconnect) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	
+	return YES;
+}
+
+- (BOOL)addRunLoopMode:(NSString *)runLoopMode
+{
+	NSAssert((theRunLoop == NULL) || (theRunLoop == CFRunLoopGetCurrent()),
+			 @"addRunLoopMode must be called from within the current RunLoop!");
+	
+	if(runLoopMode == nil)
+	{
+		return NO;
+	}
+	if([theRunLoopModes containsObject:runLoopMode])
+	{
+		return YES;
+	}
+	
+	[NSObject cancelPreviousPerformRequestsWithTarget:self];
+	theFlags &= ~kDequeueReadScheduled;
+	theFlags &= ~kDequeueWriteScheduled;
+    
+	NSArray *newRunLoopModes = [theRunLoopModes arrayByAddingObject:runLoopMode];
+	theRunLoopModes = newRunLoopModes;
+	
+	if(theReadTimer)  [self runLoopAddTimer:theReadTimer  mode:runLoopMode];
+	if(theWriteTimer) [self runLoopAddTimer:theWriteTimer mode:runLoopMode];
+	
+	if(theSource4) [self runLoopAddSource:theSource4 mode:runLoopMode];
+	if(theSource6) [self runLoopAddSource:theSource6 mode:runLoopMode];
+    
+	if(theReadStream && theWriteStream)
+	{
+		CFReadStreamScheduleWithRunLoop(theReadStream, CFRunLoopGetCurrent(), (__bridge CFStringRef)runLoopMode);
+		CFWriteStreamScheduleWithRunLoop(theWriteStream, CFRunLoopGetCurrent(), (__bridge CFStringRef)runLoopMode);
+	}
+	
+	[self performSelector:@selector(maybeDequeueRead) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	[self performSelector:@selector(maybeDequeueWrite) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	[self performSelector:@selector(maybeScheduleDisconnect) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	
+	return YES;
+}
+
+- (BOOL)removeRunLoopMode:(NSString *)runLoopMode
+{
+	NSAssert((theRunLoop == NULL) || (theRunLoop == CFRunLoopGetCurrent()),
+			 @"removeRunLoopMode must be called from within the current RunLoop!");
+	
+	if(runLoopMode == nil)
+	{
+		return NO;
+	}
+	if(![theRunLoopModes containsObject:runLoopMode])
+	{
+		return YES;
+	}
+	
+	NSMutableArray *newRunLoopModes = [theRunLoopModes mutableCopy];
+	[newRunLoopModes removeObject:runLoopMode];
+	
+	if([newRunLoopModes count] == 0)
+	{
+		return NO;
+	}
+	
+	[NSObject cancelPreviousPerformRequestsWithTarget:self];
+	theFlags &= ~kDequeueReadScheduled;
+	theFlags &= ~kDequeueWriteScheduled;
+	
+	theRunLoopModes = [newRunLoopModes copy];
+	
+	if(theReadTimer)  [self runLoopRemoveTimer:theReadTimer  mode:runLoopMode];
+	if(theWriteTimer) [self runLoopRemoveTimer:theWriteTimer mode:runLoopMode];
+	
+	if(theSource4) [self runLoopRemoveSource:theSource4 mode:runLoopMode];
+	if(theSource6) [self runLoopRemoveSource:theSource6 mode:runLoopMode];
+    
+	if(theReadStream && theWriteStream)
+	{
+		// Unschedule (not schedule) the streams from the mode being removed.
+		CFReadStreamUnscheduleFromRunLoop(theReadStream, CFRunLoopGetCurrent(), (__bridge CFStringRef)runLoopMode);
+		CFWriteStreamUnscheduleFromRunLoop(theWriteStream, CFRunLoopGetCurrent(), (__bridge CFStringRef)runLoopMode);
+	}
+	
+	[self performSelector:@selector(maybeDequeueRead) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	[self performSelector:@selector(maybeDequeueWrite) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	[self performSelector:@selector(maybeScheduleDisconnect) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	
+	return YES;
+}
+
+- (NSArray *)runLoopModes
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return theRunLoopModes;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Accepting
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (BOOL)acceptOnPort:(UInt16)port error:(NSError **)errPtr
+{
+	return [self acceptOnInterface:nil port:port error:errPtr];
+}
+	
+/**
+ * To accept on a certain interface, pass the address to accept on.
+ * To accept on any interface, pass nil or an empty string.
+ * To accept connections only from localhost, pass "localhost" or "loopback".
+**/
+- (BOOL)acceptOnInterface:(NSString *)interface port:(UInt16)port error:(NSError **)errPtr
+{
+	if (theDelegate == NULL)
+    {
+		[NSException raise:AsyncSocketException
+		            format:@"Attempting to accept without a delegate. Set a delegate first."];
+    }
+	
+	if (![self isDisconnected])
+    {
+		[NSException raise:AsyncSocketException
+		            format:@"Attempting to accept while connected or accepting connections. Disconnect first."];
+    }
+	
+	// Clear queues (spurious read/write requests post disconnect)
+	[self emptyQueues];
+
+	// Set up the listen sockaddr structs if needed.
+	
+	NSData *address4 = nil, *address6 = nil;
+	if(interface == nil || ([interface length] == 0))
+	{
+		// Accept on ANY address
+		struct sockaddr_in nativeAddr4;
+		nativeAddr4.sin_len         = sizeof(struct sockaddr_in);
+		nativeAddr4.sin_family      = AF_INET;
+		nativeAddr4.sin_port        = htons(port);
+		nativeAddr4.sin_addr.s_addr = htonl(INADDR_ANY);
+		memset(&(nativeAddr4.sin_zero), 0, sizeof(nativeAddr4.sin_zero));
+		
+		struct sockaddr_in6 nativeAddr6;
+		nativeAddr6.sin6_len       = sizeof(struct sockaddr_in6);
+		nativeAddr6.sin6_family    = AF_INET6;
+		nativeAddr6.sin6_port      = htons(port);
+		nativeAddr6.sin6_flowinfo  = 0;
+		nativeAddr6.sin6_addr      = in6addr_any;
+		nativeAddr6.sin6_scope_id  = 0;
+		
+		// Wrap the native address structures for CFSocketSetAddress.
+		address4 = [NSData dataWithBytes:&nativeAddr4 length:sizeof(nativeAddr4)];
+		address6 = [NSData dataWithBytes:&nativeAddr6 length:sizeof(nativeAddr6)];
+	}
+	else if([interface isEqualToString:@"localhost"] || [interface isEqualToString:@"loopback"])
+	{
+		// Accept only on LOOPBACK address
+		struct sockaddr_in nativeAddr4;
+		nativeAddr4.sin_len         = sizeof(struct sockaddr_in);
+		nativeAddr4.sin_family      = AF_INET;
+		nativeAddr4.sin_port        = htons(port);
+		nativeAddr4.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+		memset(&(nativeAddr4.sin_zero), 0, sizeof(nativeAddr4.sin_zero));
+	
+		struct sockaddr_in6 nativeAddr6;
+		nativeAddr6.sin6_len       = sizeof(struct sockaddr_in6);
+		nativeAddr6.sin6_family    = AF_INET6;
+		nativeAddr6.sin6_port      = htons(port);
+		nativeAddr6.sin6_flowinfo  = 0;
+		nativeAddr6.sin6_addr      = in6addr_loopback;
+		nativeAddr6.sin6_scope_id  = 0;
+		
+		// Wrap the native address structures for CFSocketSetAddress.
+		address4 = [NSData dataWithBytes:&nativeAddr4 length:sizeof(nativeAddr4)];
+		address6 = [NSData dataWithBytes:&nativeAddr6 length:sizeof(nativeAddr6)];
+	}
+	else
+	{
+		NSString *portStr = [NSString stringWithFormat:@"%hu", port];
+
+		struct addrinfo hints, *res, *res0;
+		
+		memset(&hints, 0, sizeof(hints));
+		hints.ai_family   = PF_UNSPEC;
+		hints.ai_socktype = SOCK_STREAM;
+		hints.ai_protocol = IPPROTO_TCP;
+		hints.ai_flags    = AI_PASSIVE;
+		
+		int error = getaddrinfo([interface UTF8String], [portStr UTF8String], &hints, &res0);
+		
+		if (error)
+		{
+			if (errPtr)
+			{
+				NSString *errMsg = [NSString stringWithCString:gai_strerror(error) encoding:NSASCIIStringEncoding];
+				NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+				
+				*errPtr = [NSError errorWithDomain:@"kCFStreamErrorDomainNetDB" code:error userInfo:info];
+			}
+		}
+		else
+		{
+			for (res = res0; res; res = res->ai_next)
+			{
+				if (!address4 && (res->ai_family == AF_INET))
+				{
+					// Found IPv4 address
+					// Wrap the native address structures for CFSocketSetAddress.
+					address4 = [NSData dataWithBytes:res->ai_addr length:res->ai_addrlen];
+				}
+				else if (!address6 && (res->ai_family == AF_INET6))
+				{
+					// Found IPv6 address
+					// Wrap the native address structures for CFSocketSetAddress.
+					address6 = [NSData dataWithBytes:res->ai_addr length:res->ai_addrlen];
+				}
+			}
+			freeaddrinfo(res0);
+		}
+		
+		if(!address4 && !address6) return NO;
+	}
+
+	// Create the sockets.
+
+	if (address4)
+	{
+		theSocket4 = [self newAcceptSocketForAddress:address4 error:errPtr];
+		if (theSocket4 == NULL) goto Failed;
+	}
+	
+	if (address6)
+	{
+		theSocket6 = [self newAcceptSocketForAddress:address6 error:errPtr];
+		
+		// Note: The iPhone doesn't currently support IPv6
+		
+#if !TARGET_OS_IPHONE
+		if (theSocket6 == NULL) goto Failed;
+#endif
+	}
+	
+	// Attach the sockets to the run loop so that callback methods work
+	
+	[self attachSocketsToRunLoop:nil error:nil];
+	
+	// Set the SO_REUSEADDR flags.
+
+	int reuseOn = 1;
+	if (theSocket4)	setsockopt(CFSocketGetNative(theSocket4), SOL_SOCKET, SO_REUSEADDR, &reuseOn, sizeof(reuseOn));
+	if (theSocket6)	setsockopt(CFSocketGetNative(theSocket6), SOL_SOCKET, SO_REUSEADDR, &reuseOn, sizeof(reuseOn));
+
+	// Set the local bindings, which causes the sockets to start listening.
+
+	CFSocketError err;
+	if (theSocket4)
+	{
+		err = CFSocketSetAddress(theSocket4, (__bridge CFDataRef)address4);
+		if (err != kCFSocketSuccess) goto Failed;
+		
+		//NLLogNVPInfo(@"theSocket4: %hu", [self localPortFromCFSocket4:theSocket4]);
+	}
+	
+	if(port == 0 && theSocket4 && theSocket6)
+	{
+		// The user has passed in port 0, which means they want the kernel to choose the port.
+		// However, the kernel will choose a different port for theSocket4 and theSocket6,
+		// so we grab the port the kernel chose for theSocket4 and set it as the port for theSocket6.
+		UInt16 chosenPort = [self localPortFromCFSocket4:theSocket4];
+		
+		struct sockaddr_in6 *pSockAddr6 = (struct sockaddr_in6 *)[address6 bytes];
+		if (pSockAddr6) // If statement to quiet the static analyzer
+		{
+			pSockAddr6->sin6_port = htons(chosenPort);
+		}
+    }
+	
+	if (theSocket6)
+	{
+		err = CFSocketSetAddress(theSocket6, (__bridge CFDataRef)address6);
+		if (err != kCFSocketSuccess) goto Failed;
+		
+		//NLLogNVPInfo(@"theSocket6: %hu", [self localPortFromCFSocket6:theSocket6]);
+	}
+
+	theFlags |= kDidStartDelegate;
+	return YES;
+	
+Failed:
+	if(errPtr) *errPtr = [self getSocketError];
+	if(theSocket4 != NULL)
+	{
+		CFSocketInvalidate(theSocket4);
+		CFRelease(theSocket4);
+		theSocket4 = NULL;
+	}
+	if(theSocket6 != NULL)
+	{
+		CFSocketInvalidate(theSocket6);
+		CFRelease(theSocket6);
+		theSocket6 = NULL;
+	}
+	return NO;
+}
+
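+// A minimal listening sketch (illustrative; assumes the delegate is already set
+// and implements onSocket:didAcceptNewSocket:):
+//
+//   NSError *err = nil;
+//   if (![listenSocket acceptOnInterface:@"loopback" port:8080 error:&err])
+//   {
+//       NSLog(@"accept failed: %@", err);
+//   }
+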
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Connecting
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (BOOL)connectToHost:(NSString*)hostname onPort:(UInt16)port error:(NSError **)errPtr
+{
+	return [self connectToHost:hostname onPort:port withTimeout:-1 error:errPtr];
+}
+
+/**
+ * This method creates an initial CFReadStream and CFWriteStream to the given host on the given port.
+ * The connection is then opened, and the corresponding CFSocket will be extracted after the connection succeeds.
+ *
+ * Thus the delegate will have access to the CFReadStream and CFWriteStream prior to connection,
+ * specifically in the onSocketWillConnect: method.
+**/
+- (BOOL)connectToHost:(NSString *)hostname
+			   onPort:(UInt16)port
+		  withTimeout:(NSTimeInterval)timeout
+				error:(NSError **)errPtr
+{
+	if (theDelegate == NULL)
+	{
+		[NSException raise:AsyncSocketException
+		            format:@"Attempting to connect without a delegate. Set a delegate first."];
+	}
+
+	if (![self isDisconnected])
+	{
+		[NSException raise:AsyncSocketException
+		            format:@"Attempting to connect while connected or accepting connections. Disconnect first."];
+	}
+	
+	// Clear queues (spurious read/write requests post disconnect)
+	[self emptyQueues];
+	
+	if(![self createStreamsToHost:hostname onPort:port error:errPtr]) goto Failed;
+	if(![self attachStreamsToRunLoop:nil error:errPtr])               goto Failed;
+	if(![self configureStreamsAndReturnError:errPtr])                 goto Failed;
+	if(![self openStreamsAndReturnError:errPtr])                      goto Failed;
+	
+	[self startConnectTimeout:timeout];
+	theFlags |= kDidStartDelegate;
+	
+	return YES;
+	
+Failed:
+	[self close];
+	return NO;
+}
+
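+// A minimal connect sketch (illustrative host and port; delegate already set):
+//
+//   NSError *err = nil;
+//   if (![socket connectToHost:@"example.com" onPort:80 withTimeout:30.0 error:&err])
+//   {
+//       NSLog(@"connect failed: %@", err);
+//   }
+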
+- (BOOL)connectToAddress:(NSData *)remoteAddr error:(NSError **)errPtr
+{
+	return [self connectToAddress:remoteAddr viaInterfaceAddress:nil withTimeout:-1 error:errPtr];
+}
+
+/**
+ * This method creates an initial CFSocket to the given address.
+ * The connection is then opened, and the corresponding CFReadStream and CFWriteStream will be
+ * created from the low-level sockets after the connection succeeds.
+ *
+ * Thus the delegate will have access to the CFSocket and CFSocketNativeHandle (BSD socket) prior to connection,
+ * specifically in the onSocketWillConnect: method.
+ * 
+ * Note: The NSData parameter is expected to be a sockaddr structure. For example, an NSData object returned from
+ * the NSNetService addresses method.
+ * If you have an existing struct sockaddr you can convert it to an NSData object like so:
+ * struct sockaddr sa  -> NSData *dsa = [NSData dataWithBytes:&sa length:sa.sa_len];
+ * struct sockaddr *sa -> NSData *dsa = [NSData dataWithBytes:sa length:sa->sa_len];
+**/
+- (BOOL)connectToAddress:(NSData *)remoteAddr withTimeout:(NSTimeInterval)timeout error:(NSError **)errPtr
+{
+	return [self connectToAddress:remoteAddr viaInterfaceAddress:nil withTimeout:timeout error:errPtr];
+}
+
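+// A minimal sketch (illustrative) connecting to the first address of an
+// already-resolved NSNetService:
+//
+//   NSData *addr = [[netService addresses] objectAtIndex:0];
+//   NSError *err = nil;
+//   if (![socket connectToAddress:addr withTimeout:30.0 error:&err]) { /* handle err */ }
+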
+/**
+ * This method is similar to the one above, but allows you to specify which socket interface
+ * the connection should run over. E.g. ethernet, wifi, bluetooth, etc.
+**/
+- (BOOL)connectToAddress:(NSData *)remoteAddr
+     viaInterfaceAddress:(NSData *)interfaceAddr
+             withTimeout:(NSTimeInterval)timeout
+                   error:(NSError **)errPtr
+{
+	if (theDelegate == NULL)
+	{
+		[NSException raise:AsyncSocketException
+		            format:@"Attempting to connect without a delegate. Set a delegate first."];
+	}
+	
+	if (![self isDisconnected])
+	{
+		[NSException raise:AsyncSocketException
+		            format:@"Attempting to connect while connected or accepting connections. Disconnect first."];
+	}
+	
+	// Clear queues (spurious read/write requests post disconnect)
+	[self emptyQueues];
+	
+	if(![self createSocketForAddress:remoteAddr error:errPtr])   goto Failed;
+	if(![self bindSocketToAddress:interfaceAddr error:errPtr])   goto Failed;
+	if(![self attachSocketsToRunLoop:nil error:errPtr])          goto Failed;
+	if(![self configureSocketAndReturnError:errPtr])             goto Failed;
+	if(![self connectSocketToAddress:remoteAddr error:errPtr])   goto Failed;
+	
+	[self startConnectTimeout:timeout];
+	theFlags |= kDidStartDelegate;
+	
+	return YES;
+	
+Failed:
+	[self close];
+	return NO;
+}
+
+- (void)startConnectTimeout:(NSTimeInterval)timeout
+{
+	if(timeout >= 0.0)
+	{
+        NexusTalkTimerTarget *timerTarget = [[NexusTalkTimerTarget alloc] init];
+        timerTarget.actualTarget = self;
+		theConnectTimer = [NSTimer timerWithTimeInterval:timeout
+											      target:timerTarget
+											    selector:@selector(timerFired:)
+											    userInfo:nil
+											     repeats:NO];
+		[self runLoopAddTimer:theConnectTimer];
+	}
+}
+
+- (void)endConnectTimeout
+{
+	[theConnectTimer invalidate];
+	theConnectTimer = nil;
+}
+
+- (void)doConnectTimeout:(NSTimer *)timer
+{
+	#pragma unused(timer)
+	
+	[self endConnectTimeout];
+	[self closeWithError:[self getConnectTimeoutError]];
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Socket Implementation
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * Creates and returns an accept socket for the given address.
+ * Returns NULL if the socket could not be created, in which case errPtr (if non-NULL) is set.
+**/
+- (CFSocketRef)newAcceptSocketForAddress:(NSData *)addr error:(NSError **)errPtr
+{
+	struct sockaddr *pSockAddr = (struct sockaddr *)[addr bytes];
+	int addressFamily = pSockAddr->sa_family;
+	
+	CFSocketRef theSocket = CFSocketCreate(kCFAllocatorDefault,
+	                                       addressFamily,
+	                                       SOCK_STREAM,
+	                                       0,
+	                                       kCFSocketAcceptCallBack,                // Callback flags
+	                                       (CFSocketCallBack)&MyCFSocketCallback,  // Callback method
+	                                       &theContext);
+
+	if(theSocket == NULL)
+	{
+		if(errPtr) *errPtr = [self getSocketError];
+	}
+	
+	return theSocket;
+}
+
+- (BOOL)createSocketForAddress:(NSData *)remoteAddr error:(NSError **)errPtr
+{
+	struct sockaddr *pSockAddr = (struct sockaddr *)[remoteAddr bytes];
+	
+	if(pSockAddr->sa_family == AF_INET)
+	{
+		theSocket4 = CFSocketCreate(NULL,                                   // Default allocator
+		                            PF_INET,                                // Protocol Family
+		                            SOCK_STREAM,                            // Socket Type
+		                            IPPROTO_TCP,                            // Protocol
+		                            kCFSocketConnectCallBack,               // Callback flags
+		                            (CFSocketCallBack)&MyCFSocketCallback,  // Callback method
+		                            &theContext);                           // Socket Context
+		
+		if(theSocket4 == NULL)
+		{
+			if (errPtr) *errPtr = [self getSocketError];
+			return NO;
+		}
+	}
+	else if(pSockAddr->sa_family == AF_INET6)
+	{
+		theSocket6 = CFSocketCreate(NULL,                                   // Default allocator
+								    PF_INET6,                               // Protocol Family
+								    SOCK_STREAM,                            // Socket Type
+								    IPPROTO_TCP,                            // Protocol
+								    kCFSocketConnectCallBack,               // Callback flags
+								    (CFSocketCallBack)&MyCFSocketCallback,  // Callback method
+								    &theContext);                           // Socket Context
+		
+		if(theSocket6 == NULL)
+		{
+			if (errPtr) *errPtr = [self getSocketError];
+			return NO;
+		}
+	}
+	else
+	{
+		if (errPtr)
+		{
+			NSString *errMsg = @"Remote address is not IPv4 or IPv6";
+			NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+			
+			*errPtr = [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketCFSocketError userInfo:info];
+		}
+		return NO;
+	}
+	
+	return YES;
+}
+
+- (BOOL)bindSocketToAddress:(NSData *)interfaceAddr error:(NSError **)errPtr
+{
+	if (interfaceAddr == nil) return YES;
+	
+	struct sockaddr *pSockAddr = (struct sockaddr *)[interfaceAddr bytes];
+	
+	CFSocketRef theSocket = (theSocket4 != NULL) ? theSocket4 : theSocket6;
+	NSAssert((theSocket != NULL), @"bindSocketToAddress called without valid socket");
+	
+	CFSocketNativeHandle nativeSocket = CFSocketGetNative(theSocket);
+	
+	if (pSockAddr->sa_family == AF_INET || pSockAddr->sa_family == AF_INET6)
+	{
+		int result = bind(nativeSocket, pSockAddr, (socklen_t)[interfaceAddr length]);
+		if (result != 0)
+		{
+			if (errPtr) *errPtr = [self getErrnoError];
+			return NO;
+		}
+	}
+	else
+	{
+		if (errPtr)
+		{
+			NSString *errMsg = @"Interface address is not IPv4 or IPv6";
+			NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+			
+			*errPtr = [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketCFSocketError userInfo:info];
+		}
+		return NO;
+	}
+	
+	return YES;
+}
+
+/**
+ * Adds the CFSockets to the run-loop so that callbacks will work properly.
+**/
+- (BOOL)attachSocketsToRunLoop:(NSRunLoop *)runLoop error:(NSError **)errPtr
+{
+	#pragma unused(errPtr)
+	
+	// Get the CFRunLoop to which the socket should be attached.
+	theRunLoop = (runLoop == nil) ? CFRunLoopGetCurrent() : [runLoop getCFRunLoop];
+	
+	if(theSocket4)
+	{
+		theSource4 = CFSocketCreateRunLoopSource (kCFAllocatorDefault, theSocket4, 0);
+        [self runLoopAddSource:theSource4];
+	}
+	
+	if(theSocket6)
+	{
+		theSource6 = CFSocketCreateRunLoopSource (kCFAllocatorDefault, theSocket6, 0);
+        [self runLoopAddSource:theSource6];
+	}
+	
+	return YES;
+}
+
+/**
+ * Allows the delegate to configure the CFSocket or native socket as desired before we connect.
+ * Note that the CFReadStream and CFWriteStream will not be available until after the connection is opened.
+**/
+- (BOOL)configureSocketAndReturnError:(NSError **)errPtr
+{
+	// Call the delegate method for further configuration.
+	if([theDelegate respondsToSelector:@selector(onSocketWillConnect:)])
+	{
+		if([theDelegate onSocketWillConnect:self] == NO)
+		{
+			if (errPtr) *errPtr = [self getAbortError];
+			return NO;
+		}
+	}
+	return YES;
+}
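+
+// A minimal delegate sketch (illustrative, not part of this file) of the
+// onSocketWillConnect: hook invoked above; returning NO aborts the connect
+// with the abort error:
+//
+//   - (BOOL)onSocketWillConnect:(AsyncSocket *)sock
+//   {
+//       // hypothetical policy check
+//       return self.allowsNewConnections;
+//   }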
+
+- (BOOL)connectSocketToAddress:(NSData *)remoteAddr error:(NSError **)errPtr
+{
+	// Start connecting to the given address in the background
+	// The MyCFSocketCallback method will be called when the connection succeeds or fails
+	if(theSocket4)
+	{
+		CFSocketError err = CFSocketConnectToAddress(theSocket4, (__bridge CFDataRef)remoteAddr, -1);
+		if(err != kCFSocketSuccess)
+		{
+			if (errPtr) *errPtr = [self getSocketError];
+			return NO;
+		}
+	}
+	else if(theSocket6)
+	{
+		CFSocketError err = CFSocketConnectToAddress(theSocket6, (__bridge CFDataRef)remoteAddr, -1);
+		if(err != kCFSocketSuccess)
+		{
+			if (errPtr) *errPtr = [self getSocketError];
+			return NO;
+		}
+	}
+	
+	return YES;
+}
+
+/**
+ * Attempt to make the new socket.
+ * If an error occurs, ignore this event.
+**/
+- (void)doAcceptFromSocket:(CFSocketRef)parentSocket withNewNativeSocket:(CFSocketNativeHandle)newNativeSocket
+{
+	if(newNativeSocket)
+	{
+		// New socket inherits same delegate and run loop modes.
+		// Note: We use [self class] to support subclassing AsyncSocket.
+		AsyncSocket *newSocket = [[[self class] alloc] initWithDelegate:theDelegate];
+		[newSocket setRunLoopModes:theRunLoopModes];
+		
+		if (![newSocket createStreamsFromNative:newNativeSocket error:nil])
+		{
+			[newSocket close];
+			return;
+		}
+		
+		if (parentSocket == theSocket4)
+			newSocket->theNativeSocket4 = newNativeSocket;
+		else
+			newSocket->theNativeSocket6 = newNativeSocket;
+		
+		if ([theDelegate respondsToSelector:@selector(onSocket:didAcceptNewSocket:)])
+			[theDelegate onSocket:self didAcceptNewSocket:newSocket];
+		
+		newSocket->theFlags |= kDidStartDelegate;
+		
+		NSRunLoop *runLoop = nil;
+		if ([theDelegate respondsToSelector:@selector(onSocket:wantsRunLoopForNewSocket:)])
+		{
+			runLoop = [theDelegate onSocket:self wantsRunLoopForNewSocket:newSocket];
+		}
+		
+		if(![newSocket attachStreamsToRunLoop:runLoop error:nil]) goto Failed;
+		if(![newSocket configureStreamsAndReturnError:nil])       goto Failed;
+		if(![newSocket openStreamsAndReturnError:nil])            goto Failed;
+		
+		return;
+		
+	Failed:
+		[newSocket close];
+	}
+}
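+
+// Illustrative delegate sketch for the onSocket:wantsRunLoopForNewSocket:
+// query above, which lets accepted sockets be serviced on another thread:
+//
+//   - (NSRunLoop *)onSocket:(AsyncSocket *)sock wantsRunLoopForNewSocket:(AsyncSocket *)newSocket
+//   {
+//       return self.workerRunLoop; // hypothetical run loop owned by a worker thread
+//   }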
+
+/**
+ * This method is called as a result of connectToAddress:withTimeout:error:.
+ * At this point we have an open CFSocket from which we need to create our read and write stream.
+**/
+- (void)doSocketOpen:(CFSocketRef)sock withCFSocketError:(CFSocketError)socketError
+{
+	NSParameterAssert ((sock == theSocket4) || (sock == theSocket6));
+	
+	if(socketError == kCFSocketTimeout || socketError == kCFSocketError)
+	{
+		[self closeWithError:[self getSocketError]];
+		return;
+	}
+	
+	// Get the underlying native (BSD) socket
+	CFSocketNativeHandle nativeSocket = CFSocketGetNative(sock);
+	
+	// Store a reference to it
+	if (sock == theSocket4)
+		theNativeSocket4 = nativeSocket;
+	else
+		theNativeSocket6 = nativeSocket;
+	
+	// Setup the CFSocket so that invalidating it will not close the underlying native socket
+	CFSocketSetSocketFlags(sock, 0);
+	
+	// Invalidate and release the CFSocket - All we need from here on out is the nativeSocket.
+	// Note: If we don't invalidate the CFSocket (leaving the native socket open)
+	// then theReadStream and theWriteStream won't function properly.
+	// Specifically, their callbacks won't work, with the exception of kCFStreamEventOpenCompleted.
+	// 
+	// This is likely due to the mixture of the CFSocketCreateWithNative method,
+	// along with the CFStreamCreatePairWithSocket method.
+	// The documentation for CFSocketCreateWithNative states:
+	//   
+	//   If a CFSocket object already exists for sock,
+	//   the function returns the pre-existing object instead of creating a new object;
+	//   the context, callout, and callBackTypes parameters are ignored in this case.
+	// 
+	// So the CFStreamCreateWithNative method invokes the CFSocketCreateWithNative method,
+	// thinking that it is creating a new underlying CFSocket for its own purposes.
+	// When it does this, it uses the context/callout/callbackTypes parameters to setup everything appropriately.
+	// However, if a CFSocket already exists for the native socket,
+	// then it is returned (as per the documentation), which in turn screws up the CFStreams.
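+	// 
+	// Illustrative sketch of that aliasing behavior (not executed here):
+	// 
+	//   CFSocketRef a = CFSocketCreateWithNative(NULL, fd, 0, NULL, NULL);
+	//   CFSocketRef b = CFSocketCreateWithNative(NULL, fd, 0, NULL, NULL);
+	//   // a == b : the second call returns the pre-existing object,
+	//   //          and its context/callout parameters are ignored.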
+	
+	CFSocketInvalidate(sock);
+	CFRelease(sock);
+	theSocket4 = NULL;
+	theSocket6 = NULL;
+	
+	NSError *err;
+	BOOL pass = YES;
+	
+	if(pass && ![self createStreamsFromNative:nativeSocket error:&err]) pass = NO;
+	if(pass && ![self attachStreamsToRunLoop:nil error:&err])           pass = NO;
+	if(pass && ![self openStreamsAndReturnError:&err])                  pass = NO;
+	
+	if(!pass)
+	{
+		[self closeWithError:err];
+	}
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Stream Implementation
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * Creates the CFReadStream and CFWriteStream from the given native socket.
+ * The CFSocket may be extracted from either stream after the streams have been opened.
+ * 
+ * Note: The given native socket must already be connected!
+**/
+- (BOOL)createStreamsFromNative:(CFSocketNativeHandle)native error:(NSError **)errPtr
+{
+	// Create the socket & streams.
+	CFStreamCreatePairWithSocket(kCFAllocatorDefault, native, &theReadStream, &theWriteStream);
+	if (theReadStream == NULL || theWriteStream == NULL)
+	{
+		NSError *err = [self getStreamError];
+		
+		NLLogNVPError(@"AsyncSocket %p couldn't create streams from accepted socket: %@", self, err);
+		
+		if (errPtr) *errPtr = err;
+		return NO;
+	}
+	
+	// Ensure the CF & BSD socket is closed when the streams are closed.
+	CFReadStreamSetProperty(theReadStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);
+	CFWriteStreamSetProperty(theWriteStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);
+	
+	return YES;
+}
+
+/**
+ * Creates the CFReadStream and CFWriteStream from the given hostname and port number.
+ * The CFSocket may be extracted from either stream after the streams have been opened.
+**/
+- (BOOL)createStreamsToHost:(NSString *)hostname onPort:(UInt16)port error:(NSError **)errPtr
+{
+	// Create the socket & streams.
+	CFStreamCreatePairWithSocketToHost(NULL, (__bridge CFStringRef)hostname, port, &theReadStream, &theWriteStream);
+	if (theReadStream == NULL || theWriteStream == NULL)
+	{
+		if (errPtr) *errPtr = [self getStreamError];
+		return NO;
+	}
+	
+	// Ensure the CF & BSD socket is closed when the streams are closed.
+	CFReadStreamSetProperty(theReadStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);
+	CFWriteStreamSetProperty(theWriteStream, kCFStreamPropertyShouldCloseNativeSocket, kCFBooleanTrue);
+	
+	return YES;
+}
+
+- (BOOL)attachStreamsToRunLoop:(NSRunLoop *)runLoop error:(NSError **)errPtr
+{
+	// Get the CFRunLoop to which the socket should be attached.
+	theRunLoop = (runLoop == nil) ? CFRunLoopGetCurrent() : [runLoop getCFRunLoop];
+
+	// Setup read stream callbacks
+	
+	CFOptionFlags readStreamEvents = kCFStreamEventHasBytesAvailable | 
+	                                 kCFStreamEventErrorOccurred     |
+	                                 kCFStreamEventEndEncountered    |
+	                                 kCFStreamEventOpenCompleted;
+	
+	if (!CFReadStreamSetClient(theReadStream,
+							   readStreamEvents,
+							   (CFReadStreamClientCallBack)&MyCFReadStreamCallback,
+							   (CFStreamClientContext *)(&theContext)))
+	{
+		NSError *err = [self getStreamError];
+		
+		NLLogNVPError (@"AsyncSocket %p couldn't attach read stream to run-loop,", self);
+		NLLogNVPError (@"Error: %@", err);
+		
+		if (errPtr) *errPtr = err;
+		return NO;
+	}
+
+	// Setup write stream callbacks
+	
+	CFOptionFlags writeStreamEvents = kCFStreamEventCanAcceptBytes |
+	                                  kCFStreamEventErrorOccurred  |
+	                                  kCFStreamEventEndEncountered |
+	                                  kCFStreamEventOpenCompleted;
+	
+	if (!CFWriteStreamSetClient (theWriteStream,
+								 writeStreamEvents,
+								 (CFWriteStreamClientCallBack)&MyCFWriteStreamCallback,
+								 (CFStreamClientContext *)(&theContext)))
+	{
+		NSError *err = [self getStreamError];
+		
+		NLLogNVPError (@"AsyncSocket %p couldn't attach write stream to run-loop,", self);
+		NLLogNVPError (@"Error: %@", err);
+		
+		if (errPtr) *errPtr = err;
+		return NO;
+	}
+	
+	// Add read and write streams to run loop
+	
+	for (NSString *runLoopMode in theRunLoopModes)
+	{
+		CFReadStreamScheduleWithRunLoop(theReadStream, theRunLoop, (__bridge CFStringRef)runLoopMode);
+		CFWriteStreamScheduleWithRunLoop(theWriteStream, theRunLoop, (__bridge CFStringRef)runLoopMode);
+	}
+	
+	return YES;
+}
+
+/**
+ * Allows the delegate to configure the CFReadStream and/or CFWriteStream as desired before we connect.
+ * 
+ * If being called from a connect method,
+ * the CFSocket and CFNativeSocket will not be available until after the connection is opened.
+**/
+- (BOOL)configureStreamsAndReturnError:(NSError **)errPtr
+{
+	// Call the delegate method for further configuration.
+	if([theDelegate respondsToSelector:@selector(onSocketWillConnect:)])
+	{
+		if([theDelegate onSocketWillConnect:self] == NO)
+		{
+			if (errPtr) *errPtr = [self getAbortError];
+			return NO;
+		}
+	}
+	return YES;
+}
+
+- (BOOL)openStreamsAndReturnError:(NSError **)errPtr
+{
+	BOOL pass = YES;
+	
+	if(pass && !CFReadStreamOpen(theReadStream))
+	{
+		NLLogNVPWarn (@"AsyncSocket %p couldn't open read stream,", self);
+		pass = NO;
+	}
+	
+	if(pass && !CFWriteStreamOpen(theWriteStream))
+	{
+		NLLogNVPWarn (@"AsyncSocket %p couldn't open write stream,", self);
+		pass = NO;
+	}
+	
+	if(!pass)
+	{
+		if (errPtr) *errPtr = [self getStreamError];
+	}
+	
+	return pass;
+}
+
+/**
+ * Called when read or write streams open.
+ * When the socket is connected and both streams are open, consider the AsyncSocket instance to be ready.
+**/
+- (void)doStreamOpen
+{
+	if ((theFlags & kDidCompleteOpenForRead) && (theFlags & kDidCompleteOpenForWrite))
+	{
+		NSError *err = nil;
+		
+		// Get the socket
+		if (![self setSocketFromStreamsAndReturnError: &err])
+		{
+			NLLogNVPError (@"AsyncSocket %p couldn't get socket from streams, %@. Disconnecting.", self, err);
+			[self closeWithError:err];
+			return;
+		}
+		
+        // Stop the connection attempt timeout timer
+		[self endConnectTimeout];
+        
+		if ([theDelegate respondsToSelector:@selector(onSocket:didConnectToHost:port:)])
+		{
+			[theDelegate onSocket:self didConnectToHost:[self connectedHost] port:[self connectedPort]];
+		}
+		
+		// Immediately deal with any already-queued requests.
+		[self maybeDequeueRead];
+		[self maybeDequeueWrite];
+	}
+}
+
+- (BOOL)setSocketFromStreamsAndReturnError:(NSError **)errPtr
+{
+	// Get the CFSocketNativeHandle from theReadStream
+	CFSocketNativeHandle native;
+	CFDataRef nativeProp = CFReadStreamCopyProperty(theReadStream, kCFStreamPropertySocketNativeHandle);
+	if(nativeProp == NULL)
+	{
+		if (errPtr) *errPtr = [self getStreamError];
+		return NO;
+	}
+	
+	CFIndex nativePropLen = CFDataGetLength(nativeProp);
+	CFIndex nativeLen = (CFIndex)sizeof(native);
+	
+	CFIndex len = MIN(nativePropLen, nativeLen);
+	
+	CFDataGetBytes(nativeProp, CFRangeMake(0, len), (UInt8 *)&native);
+	CFRelease(nativeProp);
+	
+	CFSocketRef theSocket = CFSocketCreateWithNative(kCFAllocatorDefault, native, 0, NULL, NULL);
+	if(theSocket == NULL)
+	{
+		if (errPtr) *errPtr = [self getSocketError];
+		return NO;
+	}
+	
+	// Determine whether the connection was IPv4 or IPv6.
+	// We may already know if this was an accepted socket,
+	// or if the connectToAddress method was used.
+	// In either of the above two cases, the native socket variable would already be set.
+	
+	if (theNativeSocket4 > 0)
+	{
+		theSocket4 = theSocket;
+		return YES;
+	}
+	if (theNativeSocket6 > 0)
+	{
+		theSocket6 = theSocket;
+		return YES;
+	}
+	
+	CFDataRef peeraddr = CFSocketCopyPeerAddress(theSocket);
+	if(peeraddr == NULL)
+	{
+		NLLogNVPError(@"AsyncSocket couldn't determine IP version of socket");
+		
+		CFRelease(theSocket);
+		
+		if (errPtr) *errPtr = [self getSocketError];
+		return NO;
+	}
+	struct sockaddr *sa = (struct sockaddr *)CFDataGetBytePtr(peeraddr);
+	
+	if(sa->sa_family == AF_INET)
+	{
+		theSocket4 = theSocket;
+		theNativeSocket4 = native;
+	}
+	else
+	{
+		theSocket6 = theSocket;
+		theNativeSocket6 = native;
+	}
+	
+	CFRelease(peeraddr);
+
+	return YES;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Disconnect Implementation
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Sends error message and disconnects
+- (void)closeWithError:(NSError *)err
+{
+	theFlags |= kClosingWithError;
+	
+	if (theFlags & kDidStartDelegate)
+	{
+		// Try to salvage what data we can.
+		[self recoverUnreadData];
+		
+		// Let the delegate know, so it can try to recover if it likes.
+		if ([theDelegate respondsToSelector:@selector(onSocket:willDisconnectWithError:)])
+		{
+			[theDelegate onSocket:self willDisconnectWithError:err];
+		}
+	}
+	[self close];
+}
+
+// Prepare partially read data for recovery.
+- (void)recoverUnreadData
+{
+	if(theCurrentRead != nil)
+	{
+		// We never finished the current read.
+		// Check to see if it's a normal read packet (not AsyncSpecialPacket) and if it had read anything yet.
+		
+		if(([theCurrentRead isKindOfClass:[AsyncReadPacket class]]) && (theCurrentRead->bytesDone > 0))
+		{
+			// We need to move its data into the front of the partial read buffer.
+			
+			void *buffer = [theCurrentRead->buffer mutableBytes] + theCurrentRead->startOffset;
+			
+			[partialReadBuffer replaceBytesInRange:NSMakeRange(0, 0)
+										 withBytes:buffer
+											length:theCurrentRead->bytesDone];
+		}
+	}
+	
+	[self emptyQueues];
+}
+
+- (void)emptyQueues
+{
+	if (theCurrentRead != nil)	[self endCurrentRead];
+	if (theCurrentWrite != nil)	[self endCurrentWrite];
+	
+	[theReadQueue removeAllObjects];
+	[theWriteQueue removeAllObjects];
+	
+	[NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(maybeDequeueRead) object:nil];
+	[NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(maybeDequeueWrite) object:nil];
+	
+	theFlags &= ~kDequeueReadScheduled;
+	theFlags &= ~kDequeueWriteScheduled;
+}
+
+/**
+ * Disconnects. This is called for both error and clean disconnections.
+**/
+- (void)close
+{
+	// Empty queues
+	[self emptyQueues];
+	
+	// Clear partialReadBuffer (pre-buffer and also unreadData buffer in case of error)
+	[partialReadBuffer replaceBytesInRange:NSMakeRange(0, [partialReadBuffer length]) withBytes:NULL length:0];
+	
+	[NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(disconnect) object:nil];
+	
+	// Stop the connection attempt timeout timer
+	if (theConnectTimer != nil)
+	{
+		[self endConnectTimeout];
+	}
+	
+	// Close streams.
+	if (theReadStream != NULL)
+	{
+        [self runLoopUnscheduleReadStream];
+		CFReadStreamClose(theReadStream);
+		CFRelease(theReadStream);
+		theReadStream = NULL;
+	}
+	if (theWriteStream != NULL)
+	{
+        [self runLoopUnscheduleWriteStream];
+		CFWriteStreamClose(theWriteStream);
+		CFRelease(theWriteStream);
+		theWriteStream = NULL;
+	}
+	
+	// Close sockets.
+	if (theSocket4 != NULL)
+	{
+		CFSocketInvalidate (theSocket4);
+		CFRelease (theSocket4);
+		theSocket4 = NULL;
+	}
+	if (theSocket6 != NULL)
+	{
+		CFSocketInvalidate (theSocket6);
+		CFRelease (theSocket6);
+		theSocket6 = NULL;
+	}
+	
+	// Closing the streams or sockets above also closed the underlying native socket
+	theNativeSocket4 = 0;
+	theNativeSocket6 = 0;
+	
+	// Remove run loop sources
+    if (theSource4 != NULL) 
+    {
+        [self runLoopRemoveSource:theSource4];
+		CFRelease (theSource4);
+		theSource4 = NULL;
+	}
+	if (theSource6 != NULL)
+	{
+        [self runLoopRemoveSource:theSource6];
+		CFRelease (theSource6);
+		theSource6 = NULL;
+	}
+	theRunLoop = NULL;
+	
+	// If the client has passed the connect/accept method, then the connection has at least begun.
+	// Notify delegate that it is now ending.
+	BOOL shouldCallDelegate = (theFlags & kDidStartDelegate);
+	
+	// Clear all flags (except the pre-buffering flag, which should remain as is)
+	theFlags &= kEnablePreBuffering;
+	
+	if (shouldCallDelegate)
+	{
+		if ([theDelegate respondsToSelector: @selector(onSocketDidDisconnect:)])
+		{
+			[theDelegate onSocketDidDisconnect:self];
+		}
+	}
+	
+	// Do not access any instance variables after calling onSocketDidDisconnect.
+	// This gives the delegate freedom to release us without returning here and crashing.
+}
+
+/**
+ * Disconnects immediately. Any pending reads or writes are dropped.
+**/
+- (void)disconnect
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	[self close];
+}
+
+/**
+ * Disconnects after all pending reads have completed.
+**/
+- (void)disconnectAfterReading
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	theFlags |= (kForbidReadsWrites | kDisconnectAfterReads);
+	
+	[self maybeScheduleDisconnect];
+}
+
+/**
+ * Disconnects after all pending writes have completed.
+**/
+- (void)disconnectAfterWriting
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	theFlags |= (kForbidReadsWrites | kDisconnectAfterWrites);
+	
+	[self maybeScheduleDisconnect];
+}
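+
+// Typical usage (illustrative; identifiers hypothetical): queue a final write,
+// then let the socket tear itself down once that write has been flushed:
+//
+//   [socket writeData:goodbyeData withTimeout:-1 tag:kTagGoodbye];
+//   [socket disconnectAfterWriting];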
+
+/**
+ * Disconnects after all pending reads and writes have completed.
+**/
+- (void)disconnectAfterReadingAndWriting
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	theFlags |= (kForbidReadsWrites | kDisconnectAfterReads | kDisconnectAfterWrites);
+	
+	[self maybeScheduleDisconnect];
+}
+
+/**
+ * Schedules a call to disconnect if possible.
+ * That is, if all writes have completed, and we're set to disconnect after writing,
+ * or if all reads have completed, and we're set to disconnect after reading.
+**/
+- (void)maybeScheduleDisconnect
+{
+	BOOL shouldDisconnect = NO;
+	
+	if(theFlags & kDisconnectAfterReads)
+	{
+		if(([theReadQueue count] == 0) && (theCurrentRead == nil))
+		{
+			if(theFlags & kDisconnectAfterWrites)
+			{
+				if(([theWriteQueue count] == 0) && (theCurrentWrite == nil))
+				{
+					shouldDisconnect = YES;
+				}
+			}
+			else
+			{
+				shouldDisconnect = YES;
+			}
+		}
+	}
+	else if(theFlags & kDisconnectAfterWrites)
+	{
+		if(([theWriteQueue count] == 0) && (theCurrentWrite == nil))
+		{
+			shouldDisconnect = YES;
+		}
+	}
+	
+	if(shouldDisconnect)
+	{
+		[self performSelector:@selector(disconnect) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	}
+}
+
+/**
+ * In the event of an error, this method may be called during onSocket:willDisconnectWithError: to read
+ * any data that's left on the socket.
+**/
+- (NSData *)unreadData
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	// Ensure this method will only return data in the event of an error
+	if (!(theFlags & kClosingWithError)) return nil;
+	
+	if (theReadStream == NULL) return nil;
+	
+	NSUInteger totalBytesRead = [partialReadBuffer length];
+	
+	BOOL error = NO;
+	while (!error && CFReadStreamHasBytesAvailable(theReadStream))
+	{
+		if (totalBytesRead == [partialReadBuffer length])
+		{
+			[partialReadBuffer increaseLengthBy:READALL_CHUNKSIZE];
+		}
+		
+		// Number of bytes to read is space left in packet buffer.
+		NSUInteger bytesToRead = [partialReadBuffer length] - totalBytesRead;
+		
+		// Read data into packet buffer
+		UInt8 *packetbuf = (UInt8 *)( [partialReadBuffer mutableBytes] + totalBytesRead );
+		
+		CFIndex result = CFReadStreamRead(theReadStream, packetbuf, (CFIndex)bytesToRead);
+		
+		// Check results
+		if (result < 0)
+		{
+			error = YES;
+		}
+		else
+		{
+			CFIndex bytesRead = result;
+			
+			totalBytesRead += bytesRead;
+		}
+	}
+	
+	[partialReadBuffer setLength:totalBytesRead];
+	
+	return partialReadBuffer;
+}
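+
+// A minimal delegate sketch (illustrative) of the intended call site for
+// unreadData, per the comment above:
+//
+//   - (void)onSocket:(AsyncSocket *)sock willDisconnectWithError:(NSError *)err
+//   {
+//       NSData *leftover = [sock unreadData];
+//       // salvage 'leftover' before the socket finishes closing (hypothetical handling)
+//   }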
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Errors
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/**
+ * Returns a standard error object for the current errno value.
+ * Errno is used for low-level BSD socket errors.
+**/
+- (NSError *)getErrnoError
+{
+	NSString *errorMsg = [NSString stringWithUTF8String:strerror(errno)];
+	NSDictionary *userInfo = [NSDictionary dictionaryWithObject:errorMsg forKey:NSLocalizedDescriptionKey];
+	
+	return [NSError errorWithDomain:NSPOSIXErrorDomain code:errno userInfo:userInfo];
+}
+
+/**
+ * Returns a standard error message for a CFSocket error.
+ * Unfortunately, CFSocket offers no feedback on its errors.
+**/
+- (NSError *)getSocketError
+{
+	NSString *errMsg = NSLocalizedStringWithDefaultValue(@"AsyncSocketCFSocketError",
+														 @"AsyncSocket", [NSBundle mainBundle],
+														 @"General CFSocket error", nil);
+	
+	NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+	
+	return [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketCFSocketError userInfo:info];
+}
+
+- (NSError *)getStreamError
+{
+	CFStreamError err;
+	if (theReadStream != NULL)
+	{
+		err = CFReadStreamGetError (theReadStream);
+		if (err.error != 0) return [self errorFromCFStreamError: err];
+	}
+	
+	if (theWriteStream != NULL)
+	{
+		err = CFWriteStreamGetError (theWriteStream);
+		if (err.error != 0) return [self errorFromCFStreamError: err];
+	}
+	
+	return nil;
+}
+
+/**
+ * Returns a standard AsyncSocket abort error.
+**/
+- (NSError *)getAbortError
+{
+	NSString *errMsg = NSLocalizedStringWithDefaultValue(@"AsyncSocketCanceledError",
+														 @"AsyncSocket", [NSBundle mainBundle],
+														 @"Connection canceled", nil);
+	
+	NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+	
+	return [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketCanceledError userInfo:info];
+}
+
+/**
+ * Returns a standard AsyncSocket connect timeout error.
+**/
+- (NSError *)getConnectTimeoutError
+{
+	NSString *errMsg = NSLocalizedStringWithDefaultValue(@"AsyncSocketConnectTimeoutError",
+														 @"AsyncSocket", [NSBundle mainBundle],
+														 @"Attempt to connect to host timed out", nil);
+	
+	NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+	
+	return [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketConnectTimeoutError userInfo:info];
+}
+
+/**
+ * Returns a standard AsyncSocket maxed out error.
+**/
+- (NSError *)getReadMaxedOutError
+{
+	NSString *errMsg = NSLocalizedStringWithDefaultValue(@"AsyncSocketReadMaxedOutError",
+														 @"AsyncSocket", [NSBundle mainBundle],
+														 @"Read operation reached set maximum length", nil);
+	
+	NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+	
+	return [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketReadMaxedOutError userInfo:info];
+}
+
+/**
+ * Returns a standard AsyncSocket read timeout error.
+**/
+- (NSError *)getReadTimeoutError
+{
+	NSString *errMsg = NSLocalizedStringWithDefaultValue(@"AsyncSocketReadTimeoutError",
+														 @"AsyncSocket", [NSBundle mainBundle],
+														 @"Read operation timed out", nil);
+	
+	NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+	
+	return [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketReadTimeoutError userInfo:info];
+}
+
+/**
+ * Returns a standard AsyncSocket write timeout error.
+**/
+- (NSError *)getWriteTimeoutError
+{
+	NSString *errMsg = NSLocalizedStringWithDefaultValue(@"AsyncSocketWriteTimeoutError",
+														 @"AsyncSocket", [NSBundle mainBundle],
+														 @"Write operation timed out", nil);
+	
+	NSDictionary *info = [NSDictionary dictionaryWithObject:errMsg forKey:NSLocalizedDescriptionKey];
+	
+	return [NSError errorWithDomain:AsyncSocketErrorDomain code:AsyncSocketWriteTimeoutError userInfo:info];
+}
+
+- (NSError *)errorFromCFStreamError:(CFStreamError)err
+{
+	if (err.domain == 0 && err.error == 0) return nil;
+	
+	// Can't use switch; these constants aren't int literals.
+	NSString *domain = @"CFStreamError (unlisted domain)";
+	NSString *message = nil;
+	
+	if(err.domain == kCFStreamErrorDomainPOSIX) {
+		domain = NSPOSIXErrorDomain;
+	}
+	else if(err.domain == kCFStreamErrorDomainMacOSStatus) {
+		domain = NSOSStatusErrorDomain;
+	}
+	else if(err.domain == kCFStreamErrorDomainMach) {
+		domain = NSMachErrorDomain;
+	}
+	else if(err.domain == kCFStreamErrorDomainNetDB)
+	{
+		domain = @"kCFStreamErrorDomainNetDB";
+		message = [NSString stringWithCString:gai_strerror(err.error) encoding:NSASCIIStringEncoding];
+	}
+	else if(err.domain == kCFStreamErrorDomainNetServices) {
+		domain = @"kCFStreamErrorDomainNetServices";
+	}
+	else if(err.domain == kCFStreamErrorDomainSOCKS) {
+		domain = @"kCFStreamErrorDomainSOCKS";
+	}
+	else if(err.domain == kCFStreamErrorDomainSystemConfiguration) {
+		domain = @"kCFStreamErrorDomainSystemConfiguration";
+	}
+	else if(err.domain == kCFStreamErrorDomainSSL) {
+		domain = @"kCFStreamErrorDomainSSL";
+	}
+	
+	NSDictionary *info = nil;
+	if(message != nil)
+	{
+		info = [NSDictionary dictionaryWithObject:message forKey:NSLocalizedDescriptionKey];
+	}
+	return [NSError errorWithDomain:domain code:err.error userInfo:info];
+}
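+
+// Example (illustrative): a CFStreamError in kCFStreamErrorDomainPOSIX with
+// err.error == ECONNRESET maps to an NSError in NSPOSIXErrorDomain with code
+// ECONNRESET (userInfo is nil here, since only the NetDB domain sets a message).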
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Diagnostics
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (BOOL)isDisconnected
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if (theNativeSocket4 > 0) return NO;
+	if (theNativeSocket6 > 0) return NO;
+	
+	if (theSocket4) return NO;
+	if (theSocket6) return NO;
+	
+	if (theReadStream)  return NO;
+	if (theWriteStream) return NO;
+	
+	return YES;
+}
+
+- (BOOL)isConnected
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return [self areStreamsConnected];
+}
+
+- (NSString *)connectedHost
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if(theSocket4)
+		return [self connectedHostFromCFSocket4:theSocket4];
+	if(theSocket6)
+		return [self connectedHostFromCFSocket6:theSocket6];
+	
+	if(theNativeSocket4 > 0)
+		return [self connectedHostFromNativeSocket4:theNativeSocket4];
+	if(theNativeSocket6 > 0)
+		return [self connectedHostFromNativeSocket6:theNativeSocket6];
+	
+	return nil;
+}
+
+- (UInt16)connectedPort
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if(theSocket4)
+		return [self connectedPortFromCFSocket4:theSocket4];
+	if(theSocket6)
+		return [self connectedPortFromCFSocket6:theSocket6];
+	
+	if(theNativeSocket4 > 0)
+		return [self connectedPortFromNativeSocket4:theNativeSocket4];
+	if(theNativeSocket6 > 0)
+		return [self connectedPortFromNativeSocket6:theNativeSocket6];
+	
+	return 0;
+}
+
+- (NSString *)localHost
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if(theSocket4)
+		return [self localHostFromCFSocket4:theSocket4];
+	if(theSocket6)
+		return [self localHostFromCFSocket6:theSocket6];
+	
+	if(theNativeSocket4 > 0)
+		return [self localHostFromNativeSocket4:theNativeSocket4];
+	if(theNativeSocket6 > 0)
+		return [self localHostFromNativeSocket6:theNativeSocket6];
+	
+	return nil;
+}
+
+- (UInt16)localPort
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if(theSocket4)
+		return [self localPortFromCFSocket4:theSocket4];
+	if(theSocket6)
+		return [self localPortFromCFSocket6:theSocket6];
+	
+	if(theNativeSocket4 > 0)
+		return [self localPortFromNativeSocket4:theNativeSocket4];
+	if(theNativeSocket6 > 0)
+		return [self localPortFromNativeSocket6:theNativeSocket6];
+	
+	return 0;
+}
+
+- (NSString *)connectedHost4
+{
+	if(theSocket4)
+		return [self connectedHostFromCFSocket4:theSocket4];
+	if(theNativeSocket4 > 0)
+		return [self connectedHostFromNativeSocket4:theNativeSocket4];
+	
+	return nil;
+}
+
+- (NSString *)connectedHost6
+{
+	if(theSocket6)
+		return [self connectedHostFromCFSocket6:theSocket6];
+	if(theNativeSocket6 > 0)
+		return [self connectedHostFromNativeSocket6:theNativeSocket6];
+	
+	return nil;
+}
+
+- (UInt16)connectedPort4
+{
+	if(theSocket4)
+		return [self connectedPortFromCFSocket4:theSocket4];
+	if(theNativeSocket4 > 0)
+		return [self connectedPortFromNativeSocket4:theNativeSocket4];
+	
+	return 0;
+}
+
+- (UInt16)connectedPort6
+{
+	if(theSocket6)
+		return [self connectedPortFromCFSocket6:theSocket6];
+	if(theNativeSocket6 > 0)
+		return [self connectedPortFromNativeSocket6:theNativeSocket6];
+	
+	return 0;
+}
+
+- (NSString *)localHost4
+{
+	if(theSocket4)
+		return [self localHostFromCFSocket4:theSocket4];
+	if(theNativeSocket4 > 0)
+		return [self localHostFromNativeSocket4:theNativeSocket4];
+	
+	return nil;
+}
+
+- (NSString *)localHost6
+{
+	if(theSocket6)
+		return [self localHostFromCFSocket6:theSocket6];
+	if(theNativeSocket6 > 0)
+		return [self localHostFromNativeSocket6:theNativeSocket6];
+	
+	return nil;
+}
+
+- (UInt16)localPort4
+{
+	if(theSocket4)
+		return [self localPortFromCFSocket4:theSocket4];
+	if(theNativeSocket4 > 0)
+		return [self localPortFromNativeSocket4:theNativeSocket4];
+	
+	return 0;
+}
+
+- (UInt16)localPort6
+{
+	if(theSocket6)
+		return [self localPortFromCFSocket6:theSocket6];
+	if(theNativeSocket6 > 0)
+		return [self localPortFromNativeSocket6:theNativeSocket6];
+	
+	return 0;
+}
+
+- (NSString *)connectedHostFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in sockaddr4;
+	socklen_t sockaddr4len = sizeof(sockaddr4);
+	
+	if(getpeername(theNativeSocket, (struct sockaddr *)&sockaddr4, &sockaddr4len) < 0)
+	{
+		return nil;
+	}
+	return [self hostFromAddress4:&sockaddr4];
+}
+
+- (NSString *)connectedHostFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in6 sockaddr6;
+	socklen_t sockaddr6len = sizeof(sockaddr6);
+	
+	if(getpeername(theNativeSocket, (struct sockaddr *)&sockaddr6, &sockaddr6len) < 0)
+	{
+		return nil;
+	}
+	return [self hostFromAddress6:&sockaddr6];
+}
+
+- (NSString *)connectedHostFromCFSocket4:(CFSocketRef)theSocket
+{
+	CFDataRef peeraddr;
+	NSString *peerstr = nil;
+
+	if((peeraddr = CFSocketCopyPeerAddress(theSocket)))
+	{
+		struct sockaddr_in *pSockAddr = (struct sockaddr_in *)CFDataGetBytePtr(peeraddr);
+
+		peerstr = [self hostFromAddress4:pSockAddr];
+		CFRelease (peeraddr);
+	}
+
+	return peerstr;
+}
+
+- (NSString *)connectedHostFromCFSocket6:(CFSocketRef)theSocket
+{
+	CFDataRef peeraddr;
+	NSString *peerstr = nil;
+
+	if((peeraddr = CFSocketCopyPeerAddress(theSocket)))
+	{
+		struct sockaddr_in6 *pSockAddr = (struct sockaddr_in6 *)CFDataGetBytePtr(peeraddr);
+		
+		peerstr = [self hostFromAddress6:pSockAddr];
+		CFRelease (peeraddr);
+	}
+
+	return peerstr;
+}
+
+- (UInt16)connectedPortFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in sockaddr4;
+	socklen_t sockaddr4len = sizeof(sockaddr4);
+	
+	if(getpeername(theNativeSocket, (struct sockaddr *)&sockaddr4, &sockaddr4len) < 0)
+	{
+		return 0;
+	}
+	return [self portFromAddress4:&sockaddr4];
+}
+
+- (UInt16)connectedPortFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in6 sockaddr6;
+	socklen_t sockaddr6len = sizeof(sockaddr6);
+	
+	if(getpeername(theNativeSocket, (struct sockaddr *)&sockaddr6, &sockaddr6len) < 0)
+	{
+		return 0;
+	}
+	return [self portFromAddress6:&sockaddr6];
+}
+
+- (UInt16)connectedPortFromCFSocket4:(CFSocketRef)theSocket
+{
+	CFDataRef peeraddr;
+	UInt16 peerport = 0;
+
+	if((peeraddr = CFSocketCopyPeerAddress(theSocket)))
+	{
+		struct sockaddr_in *pSockAddr = (struct sockaddr_in *)CFDataGetBytePtr(peeraddr);
+		
+		peerport = [self portFromAddress4:pSockAddr];
+		CFRelease (peeraddr);
+	}
+
+	return peerport;
+}
+
+- (UInt16)connectedPortFromCFSocket6:(CFSocketRef)theSocket
+{
+	CFDataRef peeraddr;
+	UInt16 peerport = 0;
+
+	if((peeraddr = CFSocketCopyPeerAddress(theSocket)))
+	{
+		struct sockaddr_in6 *pSockAddr = (struct sockaddr_in6 *)CFDataGetBytePtr(peeraddr);
+		
+		peerport = [self portFromAddress6:pSockAddr];
+		CFRelease (peeraddr);
+	}
+
+	return peerport;
+}
+
+- (NSString *)localHostFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in sockaddr4;
+	socklen_t sockaddr4len = sizeof(sockaddr4);
+	
+	if(getsockname(theNativeSocket, (struct sockaddr *)&sockaddr4, &sockaddr4len) < 0)
+	{
+		return nil;
+	}
+	return [self hostFromAddress4:&sockaddr4];
+}
+
+- (NSString *)localHostFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in6 sockaddr6;
+	socklen_t sockaddr6len = sizeof(sockaddr6);
+	
+	if(getsockname(theNativeSocket, (struct sockaddr *)&sockaddr6, &sockaddr6len) < 0)
+	{
+		return nil;
+	}
+	return [self hostFromAddress6:&sockaddr6];
+}
+
+- (NSString *)localHostFromCFSocket4:(CFSocketRef)theSocket
+{
+	CFDataRef selfaddr;
+	NSString *selfstr = nil;
+
+	if((selfaddr = CFSocketCopyAddress(theSocket)))
+	{
+		struct sockaddr_in *pSockAddr = (struct sockaddr_in *)CFDataGetBytePtr(selfaddr);
+		
+		selfstr = [self hostFromAddress4:pSockAddr];
+		CFRelease (selfaddr);
+	}
+
+	return selfstr;
+}
+
+- (NSString *)localHostFromCFSocket6:(CFSocketRef)theSocket
+{
+	CFDataRef selfaddr;
+	NSString *selfstr = nil;
+
+	if((selfaddr = CFSocketCopyAddress(theSocket)))
+	{
+		struct sockaddr_in6 *pSockAddr = (struct sockaddr_in6 *)CFDataGetBytePtr(selfaddr);
+		
+		selfstr = [self hostFromAddress6:pSockAddr];
+		CFRelease (selfaddr);
+	}
+
+	return selfstr;
+}
+
+- (UInt16)localPortFromNativeSocket4:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in sockaddr4;
+	socklen_t sockaddr4len = sizeof(sockaddr4);
+	
+	if(getsockname(theNativeSocket, (struct sockaddr *)&sockaddr4, &sockaddr4len) < 0)
+	{
+		return 0;
+	}
+	return [self portFromAddress4:&sockaddr4];
+}
+
+- (UInt16)localPortFromNativeSocket6:(CFSocketNativeHandle)theNativeSocket
+{
+	struct sockaddr_in6 sockaddr6;
+	socklen_t sockaddr6len = sizeof(sockaddr6);
+	
+	if(getsockname(theNativeSocket, (struct sockaddr *)&sockaddr6, &sockaddr6len) < 0)
+	{
+		return 0;
+	}
+	return [self portFromAddress6:&sockaddr6];
+}
+
+- (UInt16)localPortFromCFSocket4:(CFSocketRef)theSocket
+{
+	CFDataRef selfaddr;
+	UInt16 selfport = 0;
+
+	if ((selfaddr = CFSocketCopyAddress(theSocket)))
+	{
+		struct sockaddr_in *pSockAddr = (struct sockaddr_in *)CFDataGetBytePtr(selfaddr);
+		
+		selfport = [self portFromAddress4:pSockAddr];
+		CFRelease (selfaddr);
+	}
+
+	return selfport;
+}
+
+- (UInt16)localPortFromCFSocket6:(CFSocketRef)theSocket
+{
+	CFDataRef selfaddr;
+	UInt16 selfport = 0;
+
+	if ((selfaddr = CFSocketCopyAddress(theSocket)))
+	{
+		struct sockaddr_in6 *pSockAddr = (struct sockaddr_in6 *)CFDataGetBytePtr(selfaddr);
+		
+		selfport = [self portFromAddress6:pSockAddr];
+		CFRelease (selfaddr);
+	}
+
+	return selfport;
+}
+
+- (NSString *)hostFromAddress4:(struct sockaddr_in *)pSockaddr4
+{
+	char addrBuf[INET_ADDRSTRLEN];
+	
+	if(inet_ntop(AF_INET, &pSockaddr4->sin_addr, addrBuf, (socklen_t)sizeof(addrBuf)) == NULL)
+	{
+		[NSException raise:NSInternalInconsistencyException format:@"Cannot convert IPv4 address to string."];
+	}
+	
+	return [NSString stringWithCString:addrBuf encoding:NSASCIIStringEncoding];
+}
+
+- (NSString *)hostFromAddress6:(struct sockaddr_in6 *)pSockaddr6
+{
+	char addrBuf[INET6_ADDRSTRLEN];
+	
+	if(inet_ntop(AF_INET6, &pSockaddr6->sin6_addr, addrBuf, (socklen_t)sizeof(addrBuf)) == NULL)
+	{
+		[NSException raise:NSInternalInconsistencyException format:@"Cannot convert IPv6 address to string."];
+	}
+	
+	return [NSString stringWithCString:addrBuf encoding:NSASCIIStringEncoding];
+}
+
+- (UInt16)portFromAddress4:(struct sockaddr_in *)pSockaddr4
+{
+	return ntohs(pSockaddr4->sin_port);
+}
+
+- (UInt16)portFromAddress6:(struct sockaddr_in6 *)pSockaddr6
+{
+	return ntohs(pSockaddr6->sin6_port);
+}
+
+- (NSData *)connectedAddress
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	// Extract address from CFSocket
+	
+    CFSocketRef theSocket;
+    
+    if (theSocket4)
+        theSocket = theSocket4;
+    else
+        theSocket = theSocket6;
+    
+    if (theSocket)
+    {
+		CFDataRef peeraddr = CFSocketCopyPeerAddress(theSocket);
+		
+		if (peeraddr == NULL) return nil;
+		
+		NSData *result = (__bridge_transfer NSData *)peeraddr;
+		return result;
+	}
+	
+	// Extract address from CFSocketNativeHandle
+	
+	socklen_t sockaddrlen;
+	CFSocketNativeHandle theNativeSocket = 0;
+	
+	if (theNativeSocket4 > 0)
+	{
+		theNativeSocket = theNativeSocket4;
+		sockaddrlen = sizeof(struct sockaddr_in);
+	}
+	else
+	{
+		theNativeSocket = theNativeSocket6;
+		sockaddrlen = sizeof(struct sockaddr_in6);
+	}
+	
+	NSData *result = nil;
+	void *sockaddr = malloc(sockaddrlen);
+	
+	if(getpeername(theNativeSocket, (struct sockaddr *)sockaddr, &sockaddrlen) >= 0)
+	{
+		result = [NSData dataWithBytesNoCopy:sockaddr length:sockaddrlen freeWhenDone:YES];
+	}
+	else
+	{
+		free(sockaddr);
+	}
+	
+	return result;
+}
+
+- (NSData *)localAddress
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	// Extract address from CFSocket
+	
+    CFSocketRef theSocket;
+    
+    if (theSocket4)
+        theSocket = theSocket4;
+    else
+        theSocket = theSocket6;
+    
+    if (theSocket)
+    {
+		CFDataRef selfaddr = CFSocketCopyAddress(theSocket);
+		
+		if (selfaddr == NULL) return nil;
+		
+		NSData *result = (__bridge_transfer NSData *)selfaddr;
+		return result;
+	}
+	
+	// Extract address from CFSocketNativeHandle
+	
+	socklen_t sockaddrlen;
+	CFSocketNativeHandle theNativeSocket = 0;
+	
+	if (theNativeSocket4 > 0)
+	{
+		theNativeSocket = theNativeSocket4;
+		sockaddrlen = sizeof(struct sockaddr_in);
+	}
+	else
+	{
+		theNativeSocket = theNativeSocket6;
+		sockaddrlen = sizeof(struct sockaddr_in6);
+	}
+	
+	NSData *result = nil;
+	void *sockaddr = malloc(sockaddrlen);
+	
+	if(getsockname(theNativeSocket, (struct sockaddr *)sockaddr, &sockaddrlen) >= 0)
+	{
+		result = [NSData dataWithBytesNoCopy:sockaddr length:sockaddrlen freeWhenDone:YES];
+	}
+	else
+	{
+		free(sockaddr);
+	}
+	
+	return result;
+}
+
+- (BOOL)isIPv4
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return (theNativeSocket4 > 0 || theSocket4 != NULL);
+}
+
+- (BOOL)isIPv6
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	return (theNativeSocket6 > 0 || theSocket6 != NULL);
+}
+
+- (BOOL)areStreamsConnected
+{
+	CFStreamStatus s;
+    
+	if (theReadStream != NULL)
+	{
+		s = CFReadStreamGetStatus(theReadStream);
+		if ( !(s == kCFStreamStatusOpen || s == kCFStreamStatusReading || s == kCFStreamStatusError) )
+			return NO;
+	}
+	else return NO;
+    
+	if (theWriteStream != NULL)
+	{
+		s = CFWriteStreamGetStatus(theWriteStream);
+		if ( !(s == kCFStreamStatusOpen || s == kCFStreamStatusWriting || s == kCFStreamStatusError) )
+			return NO;
+	}
+	else return NO;
+    
+	return YES;
+}
+
+- (NSString *)description
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	static const char *statstr[] = {"not open","opening","open","reading","writing","at end","closed","has error"};
+	CFStreamStatus rs = (theReadStream != NULL) ? CFReadStreamGetStatus(theReadStream) : 0;
+	CFStreamStatus ws = (theWriteStream != NULL) ? CFWriteStreamGetStatus(theWriteStream) : 0;
+	
+	NSString *peerstr, *selfstr;
+
+	BOOL is4 = [self isIPv4];
+	BOOL is6 = [self isIPv6];
+	
+	if (is4 || is6)
+	{
+		if (is4 && is6)
+		{
+			peerstr = [NSString stringWithFormat: @"%@/%@ %u", 
+					   [self connectedHost4],
+					   [self connectedHost6],
+					   [self connectedPort]];
+		}
+		else if (is4)
+		{
+			peerstr = [NSString stringWithFormat: @"%@ %u", 
+					   [self connectedHost4],
+					   [self connectedPort4]];
+		}
+		else
+		{
+			peerstr = [NSString stringWithFormat: @"%@ %u",
+					   [self connectedHost6],
+					   [self connectedPort6]];
+		}
+	}
+	else peerstr = @"nowhere";
+
+	if (is4 || is6)
+	{
+		if (is4 && is6)
+		{
+			selfstr = [NSString stringWithFormat: @"%@/%@ %u",
+					   [self localHost4],
+					   [self localHost6],
+					   [self localPort]];
+		}
+		else if (is4)
+		{
+			selfstr = [NSString stringWithFormat: @"%@ %u",
+					   [self localHost4],
+					   [self localPort4]];
+		}
+		else
+		{
+			selfstr = [NSString stringWithFormat: @"%@ %u",
+					   [self localHost6],
+					   [self localPort6]];
+		}
+	}
+	else selfstr = @"nowhere";
+	
+	NSMutableString *ms = [[NSMutableString alloc] initWithCapacity:150];
+	
+	[ms appendString:[NSString stringWithFormat:@"<AsyncSocket %p", self]];
+	[ms appendString:[NSString stringWithFormat:@" local %@ remote %@ ", selfstr, peerstr]];
+	
+	unsigned readQueueCount  = (unsigned)[theReadQueue count];
+	unsigned writeQueueCount = (unsigned)[theWriteQueue count];
+	
+	[ms appendString:[NSString stringWithFormat:@"has queued %u reads %u writes, ", readQueueCount, writeQueueCount]];
+
+	if (theCurrentRead == nil || [theCurrentRead isKindOfClass:[AsyncSpecialPacket class]])
+		[ms appendString: @"no current read, "];
+	else
+	{
+		int percentDone;
+		if (theCurrentRead->readLength > 0)
+			percentDone = (int)((float)theCurrentRead->bytesDone / (float)theCurrentRead->readLength * 100.0F);
+		else
+			percentDone = 100;
+
+		[ms appendString: [NSString stringWithFormat:@"currently read %u bytes (%d%% done), ",
+			(unsigned int)[theCurrentRead->buffer length],
+			theCurrentRead->bytesDone ? percentDone : 0]];
+	}
+
+	if (theCurrentWrite == nil || [theCurrentWrite isKindOfClass:[AsyncSpecialPacket class]])
+		[ms appendString: @"no current write, "];
+	else
+	{
+		int percentDone = (int)((float)theCurrentWrite->bytesDone / (float)[theCurrentWrite->buffer length] * 100.0F);
+
+		[ms appendString: [NSString stringWithFormat:@"currently written %u (%d%%), ",
+			(unsigned int)[theCurrentWrite->buffer length],
+			theCurrentWrite->bytesDone ? percentDone : 0]];
+	}
+	
+	[ms appendString:[NSString stringWithFormat:@"read stream %p %s, ", theReadStream, statstr[rs]]];
+	[ms appendString:[NSString stringWithFormat:@"write stream %p %s", theWriteStream, statstr[ws]]];
+	
+	if(theFlags & kDisconnectAfterReads)
+	{
+		if(theFlags & kDisconnectAfterWrites)
+			[ms appendString: @", will disconnect after reads & writes"];
+		else
+			[ms appendString: @", will disconnect after reads"];
+	}
+	else if(theFlags & kDisconnectAfterWrites)
+	{
+		[ms appendString: @", will disconnect after writes"];
+	}
+	
+	if (![self isConnected]) [ms appendString: @", not connected"];
+
+	[ms appendString:@">"];
+
+	return ms;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Reading
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (void)readDataWithTimeout:(NSTimeInterval)timeout tag:(long)tag
+{
+	[self readDataWithTimeout:timeout buffer:nil bufferOffset:0 maxLength:0 tag:tag];
+}
+
+- (void)readDataWithTimeout:(NSTimeInterval)timeout
+                     buffer:(NSMutableData *)buffer
+               bufferOffset:(NSUInteger)offset
+                        tag:(long)tag
+{
+	[self readDataWithTimeout:timeout buffer:buffer bufferOffset:offset maxLength:0 tag:tag];
+}
+
+- (void)readDataWithTimeout:(NSTimeInterval)timeout
+                     buffer:(NSMutableData *)buffer
+               bufferOffset:(NSUInteger)offset
+                  maxLength:(NSUInteger)length
+                        tag:(long)tag
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if (offset > [buffer length]) return;
+	if (theFlags & kForbidReadsWrites) return;
+	
+	AsyncReadPacket *packet = [[AsyncReadPacket alloc] initWithData:buffer
+	                                                    startOffset:offset
+	                                                      maxLength:length
+	                                                        timeout:timeout
+	                                                     readLength:0
+	                                                     terminator:nil
+	                                                            tag:tag];
+	[theReadQueue addObject:packet];
+	[self scheduleDequeueRead];
+}
+
+- (void)readDataToLength:(NSUInteger)length withTimeout:(NSTimeInterval)timeout tag:(long)tag
+{
+	[self readDataToLength:length withTimeout:timeout buffer:nil bufferOffset:0 tag:tag];
+}
+
+- (void)readDataToLength:(NSUInteger)length
+             withTimeout:(NSTimeInterval)timeout
+                  buffer:(NSMutableData *)buffer
+            bufferOffset:(NSUInteger)offset
+                     tag:(long)tag
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if (length == 0) return;
+	if (offset > [buffer length]) return;
+	if (theFlags & kForbidReadsWrites) return;
+	
+	AsyncReadPacket *packet = [[AsyncReadPacket alloc] initWithData:buffer
+	                                                    startOffset:offset
+	                                                      maxLength:0
+	                                                        timeout:timeout
+	                                                     readLength:length
+	                                                     terminator:nil
+	                                                            tag:tag];
+	[theReadQueue addObject:packet];
+	[self scheduleDequeueRead];
+}
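+
+// Illustrative two-step read for a length-prefixed protocol (tag hypothetical):
+//
+//   [socket readDataToLength:sizeof(uint32_t) withTimeout:-1 tag:kTagHeader];
+//   // ...then, in onSocket:didReadData:withTag:, parse the 4-byte length and
+//   // issue a second readDataToLength: for the payload.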
+
+- (void)readDataToData:(NSData *)data withTimeout:(NSTimeInterval)timeout tag:(long)tag
+{
+	[self readDataToData:data withTimeout:timeout buffer:nil bufferOffset:0 maxLength:0 tag:tag];
+}
+
+- (void)readDataToData:(NSData *)data
+           withTimeout:(NSTimeInterval)timeout
+                buffer:(NSMutableData *)buffer
+          bufferOffset:(NSUInteger)offset
+                   tag:(long)tag
+{
+	[self readDataToData:data withTimeout:timeout buffer:buffer bufferOffset:offset maxLength:0 tag:tag];
+}
+
+- (void)readDataToData:(NSData *)data withTimeout:(NSTimeInterval)timeout maxLength:(NSUInteger)length tag:(long)tag
+{
+	[self readDataToData:data withTimeout:timeout buffer:nil bufferOffset:0 maxLength:length tag:tag];
+}
+
+- (void)readDataToData:(NSData *)data
+           withTimeout:(NSTimeInterval)timeout
+                buffer:(NSMutableData *)buffer
+          bufferOffset:(NSUInteger)offset
+             maxLength:(NSUInteger)length
+                   tag:(long)tag
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if (data == nil || [data length] == 0) return;
+	if (offset > [buffer length]) return;
+	if (length > 0 && length < [data length]) return;
+	if (theFlags & kForbidReadsWrites) return;
+	
+	AsyncReadPacket *packet = [[AsyncReadPacket alloc] initWithData:buffer
+	                                                    startOffset:offset
+	                                                      maxLength:length
+	                                                        timeout:timeout
+	                                                     readLength:0
+	                                                     terminator:data
+	                                                            tag:tag];
+	[theReadQueue addObject:packet];
+	[self scheduleDequeueRead];
+}
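+
+// Illustrative usage for a CRLF-delimited protocol (tag hypothetical):
+//
+//   NSData *crlf = [@"\r\n" dataUsingEncoding:NSASCIIStringEncoding];
+//   [socket readDataToData:crlf withTimeout:30.0 tag:kTagReadLine];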
+
+/**
+ * Puts a maybeDequeueRead on the run loop. 
+ * An assumption here is that selectors will be performed consecutively within their priority.
+**/
+- (void)scheduleDequeueRead
+{
+	if((theFlags & kDequeueReadScheduled) == 0)
+	{
+		theFlags |= kDequeueReadScheduled;
+		[self performSelector:@selector(maybeDequeueRead) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	}
+}
+
+/**
+ * This method starts a new read, if needed.
+ * It is called when a user requests a read,
+ * or when a stream opens that may have requested reads sitting in the queue, etc.
+**/
+- (void)maybeDequeueRead
+{
+	// Unset the flag indicating a call to this method is scheduled
+	theFlags &= ~kDequeueReadScheduled;
+	
+	// If we're not currently processing a read AND we have an available read stream
+	if((theCurrentRead == nil) && (theReadStream != NULL))
+	{
+		if([theReadQueue count] > 0)
+		{
+			// Dequeue the next object in the read queue
+			theCurrentRead = [theReadQueue objectAtIndex:0];
+			[theReadQueue removeObjectAtIndex:0];
+			
+			if([theCurrentRead isKindOfClass:[AsyncSpecialPacket class]])
+			{
+				// Attempt to start TLS
+				theFlags |= kStartingReadTLS;
+				
+				// This method won't do anything unless both kStartingReadTLS and kStartingWriteTLS are set
+				[self maybeStartTLS];
+			}
+			else
+			{
+				// Start time-out timer
+				if(theCurrentRead->timeout >= 0.0)
+				{
+                    NexusTalkTimerTarget *timerTarget = [[NexusTalkTimerTarget alloc] init];
+                    timerTarget.actualTarget = self;
+					theReadTimer = [NSTimer timerWithTimeInterval:theCurrentRead->timeout
+														   target:timerTarget
+														 selector:@selector(timerFired:)
+														 userInfo:nil
+														  repeats:NO];
+					[self runLoopAddTimer:theReadTimer];
+				}
+				
+				// Immediately read, if possible
+				[self doBytesAvailable];
+			}
+		}
+		else if(theFlags & kDisconnectAfterReads)
+		{
+			if(theFlags & kDisconnectAfterWrites)
+			{
+				if(([theWriteQueue count] == 0) && (theCurrentWrite == nil))
+				{
+					[self disconnect];
+				}
+			}
+			else
+			{
+				[self disconnect];
+			}
+		}
+	}
+}
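+
+// Note: the disconnect-after-reads handling above mirrors the logic in
+// maybeScheduleDisconnect, so a pending disconnect fires whether the queues
+// drain here or via the disconnectAfter... entry points.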
+
+/**
+ * Call this method in doBytesAvailable instead of CFReadStreamHasBytesAvailable().
+ * This method supports pre-buffering properly as well as the kSocketHasBytesAvailable flag.
+**/
+- (BOOL)hasBytesAvailable
+{
+	if ((theFlags & kSocketHasBytesAvailable) || ([partialReadBuffer length] > 0))
+	{
+		return YES;
+	}
+	else
+	{
+		return CFReadStreamHasBytesAvailable(theReadStream);
+	}
+}
+
+/**
+ * Call this method in doBytesAvailable instead of CFReadStreamRead().
+ * This method supports pre-buffering properly.
+**/
+- (CFIndex)readIntoBuffer:(void *)buffer maxLength:(NSUInteger)length
+{
+	if([partialReadBuffer length] > 0)
+	{
+		// Determine the maximum amount of data to read
+		NSUInteger bytesToRead = MIN(length, [partialReadBuffer length]);
+		
+		// Copy the bytes from the partial read buffer
+		memcpy(buffer, [partialReadBuffer bytes], (size_t)bytesToRead);
+		
+		// Remove the copied bytes from the partial read buffer
+		[partialReadBuffer replaceBytesInRange:NSMakeRange(0, bytesToRead) withBytes:NULL length:0];
+		
+		return (CFIndex)bytesToRead;
+	}
+	else
+	{
+		// Unset the "has-bytes-available" flag
+		theFlags &= ~kSocketHasBytesAvailable;
+		
+		return CFReadStreamRead(theReadStream, (UInt8 *)buffer, (CFIndex)length);
+	}
+}
+
+/**
+ * This method is called when a new read is taken from the read queue or when new data becomes available on the stream.
+**/
+- (void)doBytesAvailable
+{
+	// If data is available on the stream, but there is no read request, then we don't need to process the data yet.
+	// Also, if there is a read request but no read stream setup, we can't process any data yet.
+	if((theCurrentRead == nil) || (theReadStream == NULL))
+	{
+		return;
+	}
+	
+	// Note: This method is not called if theCurrentRead is an AsyncSpecialPacket (startTLS packet)
+	
+	NSUInteger totalBytesRead = 0;
+	
+	BOOL done = NO;
+	BOOL socketError = NO;
+	BOOL maxoutError = NO;
+	
+	while(!done && !socketError && !maxoutError && [self hasBytesAvailable])
+	{
+		BOOL didPreBuffer = NO;
+		BOOL didReadFromPreBuffer = NO;
+		
+		// There are 3 types of read packets:
+		// 
+		// 1) Read all available data.
+		// 2) Read a specific length of data.
+		// 3) Read up to a particular terminator.
+		
+		NSUInteger bytesToRead;
+		
+		if (theCurrentRead->term != nil)
+		{
+			// Read type #3 - read up to a terminator
+			// 
+			// If pre-buffering is enabled we'll read a chunk and search for the terminator.
+			// If the terminator is found, overflow data will be placed in the partialReadBuffer for the next read.
+			// 
+			// If pre-buffering is disabled we'll be forced to read only a few bytes.
+			// Just enough to ensure we don't go past our term or over our max limit.
+			// 
+			// If we already have data pre-buffered, we can read directly from it.
+			
+			if ([partialReadBuffer length] > 0)
+			{
+				didReadFromPreBuffer = YES;
+				bytesToRead = [theCurrentRead readLengthForTermWithPreBuffer:partialReadBuffer found:&done];
+			}
+			else
+			{
+				if (theFlags & kEnablePreBuffering)
+				{
+					didPreBuffer = YES;
+					bytesToRead = [theCurrentRead prebufferReadLengthForTerm];
+				}
+				else
+				{
+					bytesToRead = [theCurrentRead readLengthForTerm];
+				}
+			}
+		}
+		else
+		{
+			// Read type #1 or #2
+			
+			bytesToRead = [theCurrentRead readLengthForNonTerm];
+		}
+		
+		// Make sure we have enough room in the buffer for our read
+		
+		NSUInteger buffSize = [theCurrentRead->buffer length];
+		NSUInteger buffSpace = buffSize - theCurrentRead->startOffset - theCurrentRead->bytesDone;
+		
+		if (bytesToRead > buffSpace)
+		{
+			NSUInteger buffInc = bytesToRead - buffSpace;
+			
+			[theCurrentRead->buffer increaseLengthBy:buffInc];
+		}
+		
+		// Read data into packet buffer
+		
+		void *buffer = [theCurrentRead->buffer mutableBytes] + theCurrentRead->startOffset;
+		void *subBuffer = buffer + theCurrentRead->bytesDone;
+		
+		CFIndex result = [self readIntoBuffer:subBuffer maxLength:bytesToRead];
+		
+		// Check results
+		if (result < 0)
+		{
+			socketError = YES;
+		}
+		else
+		{
+			CFIndex bytesRead = result;
+			
+			// Update total amount read for the current read
+			theCurrentRead->bytesDone += bytesRead;
+			
+			// Update total amount read in this method invocation
+			totalBytesRead += bytesRead;
+		
+			// Is packet done?
+			if (theCurrentRead->readLength > 0)
+			{
+				// Read type #2 - read a specific length of data
+				
+				done = (theCurrentRead->bytesDone == theCurrentRead->readLength);
+			}
+			else if (theCurrentRead->term != nil)
+			{
+				// Read type #3 - read up to a terminator
+				
+				if (didPreBuffer)
+				{
+					// Search for the terminating sequence within the big chunk we just read.
+					
+					NSInteger overflow = [theCurrentRead searchForTermAfterPreBuffering:result];
+					
+					if (overflow > 0)
+					{
+						// Copy excess data into partialReadBuffer
+						void *overflowBuffer = buffer + theCurrentRead->bytesDone - overflow;
+						
+						[partialReadBuffer appendBytes:overflowBuffer length:overflow];
+						
+						// Update the bytesDone variable.
+						theCurrentRead->bytesDone -= overflow;
+						
+						// Note: The completeCurrentRead method will trim the buffer for us.
+					}
+					
+					done = (overflow >= 0);
+				}
+				else if (didReadFromPreBuffer)
+				{
+					// Our 'done' variable was updated via the readLengthForTermWithPreBuffer:found: method
+				}
+				else
+				{
+					// Search for the terminating sequence at the end of the buffer
+					
+					NSUInteger termlen = [theCurrentRead->term length];
+					
+					if(theCurrentRead->bytesDone >= termlen)
+					{
+						void *bufferEnd = buffer + (theCurrentRead->bytesDone - termlen);
+						
+						const void *seq = [theCurrentRead->term bytes];
+						
+						done = (memcmp (bufferEnd, seq, termlen) == 0);
+					}
+				}
+				
+				if(!done && theCurrentRead->maxLength > 0)
+				{
+					// We're not done and there's a set maxLength.
+					// Have we reached that maxLength yet?
+					
+					if(theCurrentRead->bytesDone >= theCurrentRead->maxLength)
+					{
+						maxoutError = YES;
+					}
+				}
+			}
+			else
+			{
+				// Read type #1 - read all available data
+				// 
+				// We're done when:
+				// - we reach maxLength (if there is a max)
+		// - all readable data has been read (see below)
+				
+				if (theCurrentRead->maxLength > 0)
+				{
+					done = (theCurrentRead->bytesDone >= theCurrentRead->maxLength);
+				}
+			}
+		}
+	}
+	
+	if (theCurrentRead->readLength <= 0 && theCurrentRead->term == nil)
+	{
+		// Read type #1 - read all available data
+		
+		if (theCurrentRead->bytesDone > 0)
+		{
+			// Ran out of bytes, so the "read-all-available-data" type packet is done
+			done = YES;
+		}
+	}
+	
+	if (done)
+	{
+		[self completeCurrentRead];
+		if (!socketError) {
+			[self scheduleDequeueRead];
+		}
+	}
+	else if (totalBytesRead > 0)
+	{
+		// We're not done with the readToLength or readToData yet, but we have read in some bytes
+		if ([theDelegate respondsToSelector:@selector(onSocket:didReadPartialDataOfLength:tag:)])
+		{
+			[theDelegate onSocket:self didReadPartialDataOfLength:totalBytesRead tag:theCurrentRead->tag];
+		}
+	}
+	
+	if(socketError)
+	{
+		CFStreamError err = CFReadStreamGetError(theReadStream);
+		[self closeWithError:[self errorFromCFStreamError:err]];
+		return;
+	}
+	
+	if(maxoutError)
+	{
+		[self closeWithError:[self getReadMaxedOutError]];
+		return;
+	}
+}
+
+// Ends current read and calls delegate.
+- (void)completeCurrentRead
+{
+	NSAssert(theCurrentRead, @"Trying to complete current read when there is no current read.");
+	
+	NSData *result;
+	
+	if (theCurrentRead->bufferOwner)
+	{
+		// We created the buffer on behalf of the user.
+		// Trim our buffer to be the proper size.
+		[theCurrentRead->buffer setLength:theCurrentRead->bytesDone];
+		
+		result = theCurrentRead->buffer;
+	}
+	else
+	{
+		// We did NOT create the buffer.
+		// The buffer is owned by the caller.
+		// Only trim the buffer if we had to increase its size.
+		
+		if ([theCurrentRead->buffer length] > theCurrentRead->originalBufferLength)
+		{
+			NSUInteger readSize = theCurrentRead->startOffset + theCurrentRead->bytesDone;
+			NSUInteger origSize = theCurrentRead->originalBufferLength;
+			
+			NSUInteger buffSize = MAX(readSize, origSize);
+			
+			[theCurrentRead->buffer setLength:buffSize];
+		}
+		
+		void *buffer = [theCurrentRead->buffer mutableBytes] + theCurrentRead->startOffset;
+		
+		result = [NSData dataWithBytesNoCopy:buffer length:theCurrentRead->bytesDone freeWhenDone:NO];
+	}
+	
+	if([theDelegate respondsToSelector:@selector(onSocket:didReadData:withTag:)])
+	{
+		[theDelegate onSocket:self didReadData:result withTag:theCurrentRead->tag];
+	}
+	
+	// Caller may have disconnected in the above delegate method
+	if (theCurrentRead != nil)
+	{
+		[self endCurrentRead];
+	}
+}
+
+// Ends current read.
+- (void)endCurrentRead
+{
+	NSAssert(theCurrentRead, @"Trying to end current read when there is no current read.");
+	
+	[theReadTimer invalidate];
+	theReadTimer = nil;
+	
+	theCurrentRead = nil;
+}
+
+- (void)timerFired:(NSTimer *)timer {
+    if (timer == theReadTimer) {
+        [self doReadTimeout:timer];
+    } else if (timer == theWriteTimer) {
+        [self doWriteTimeout:timer];
+    } else if (timer == theConnectTimer) {
+        [self doConnectTimeout:timer];
+    }
+}
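+
+// NexusTalkTimerTarget is defined elsewhere in this import. From its use
+// below, it appears to be a trampoline that holds the socket weakly so the
+// NSTimer's strong reference to its target cannot create a retain cycle.
+// A minimal sketch, assuming only the actualTarget property used here:
+//
+//   @interface NexusTalkTimerTarget : NSObject
+//   @property (nonatomic, weak) id actualTarget;
+//   @end
+//
+//   @implementation NexusTalkTimerTarget
+//   - (void)timerFired:(NSTimer *)timer {
+//       [self.actualTarget timerFired:timer];
+//   }
+//   @end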
+
+- (void)doReadTimeout:(NSTimer *)timer
+{
+	#pragma unused(timer)
+	
+	NSTimeInterval timeoutExtension = 0.0;
+	
+	if([theDelegate respondsToSelector:@selector(onSocket:shouldTimeoutReadWithTag:elapsed:bytesDone:)])
+	{
+		timeoutExtension = [theDelegate onSocket:self shouldTimeoutReadWithTag:theCurrentRead->tag
+		                                                               elapsed:theCurrentRead->timeout
+		                                                             bytesDone:theCurrentRead->bytesDone];
+	}
+	
+	if(timeoutExtension > 0.0)
+	{
+		theCurrentRead->timeout += timeoutExtension;
+        NexusTalkTimerTarget *timerTarget = [[NexusTalkTimerTarget alloc] init];
+        timerTarget.actualTarget = self;
+		theReadTimer = [NSTimer timerWithTimeInterval:timeoutExtension
+											   target:timerTarget
+											 selector:@selector(timerFired:)
+											 userInfo:nil
+											  repeats:NO];
+		[self runLoopAddTimer:theReadTimer];
+	}
+	else
+	{
+		// Do not call endCurrentRead here.
+		// We must allow the delegate access to any partial read in the unreadData method.
+		
+		[self closeWithError:[self getReadTimeoutError]];
+	}
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Writing
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (void)writeData:(NSData *)data withTimeout:(NSTimeInterval)timeout tag:(long)tag
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if (data == nil || [data length] == 0) return;
+	if (theFlags & kForbidReadsWrites) return;
+	
+	AsyncWritePacket *packet = [[AsyncWritePacket alloc] initWithData:data timeout:timeout tag:tag];
+	
+	[theWriteQueue addObject:packet];
+	[self scheduleDequeueWrite];
+	
+}
+
+- (void)scheduleDequeueWrite
+{
+	if((theFlags & kDequeueWriteScheduled) == 0)
+	{
+		theFlags |= kDequeueWriteScheduled;
+		[self performSelector:@selector(maybeDequeueWrite) withObject:nil afterDelay:0 inModes:theRunLoopModes];
+	}
+}
+
+/**
+ * Conditionally starts a new write.
+ * 
+ * IF there is not another write in progress
+ * AND there is a write queued
+ * AND we have a write stream available
+ * 
+ * This method also handles auto-disconnect after reads and writes complete.
+**/
+- (void)maybeDequeueWrite
+{
+	// Unset the flag indicating a call to this method is scheduled
+	theFlags &= ~kDequeueWriteScheduled;
+	
+	// If we're not currently processing a write AND we have an available write stream
+	if((theCurrentWrite == nil) && (theWriteStream != NULL))
+	{
+		if([theWriteQueue count] > 0)
+		{
+			// Dequeue the next object in the write queue
+			theCurrentWrite = [theWriteQueue objectAtIndex:0];
+			[theWriteQueue removeObjectAtIndex:0];
+			
+			if([theCurrentWrite isKindOfClass:[AsyncSpecialPacket class]])
+			{
+				// Attempt to start TLS
+				theFlags |= kStartingWriteTLS;
+				
+				// This method won't do anything unless both kStartingReadTLS and kStartingWriteTLS are set
+				[self maybeStartTLS];
+			}
+			else
+			{
+				// Start time-out timer
+				if(theCurrentWrite->timeout >= 0.0)
+				{
+                    NexusTalkTimerTarget *timerTarget = [[NexusTalkTimerTarget alloc] init];
+                    timerTarget.actualTarget = self;
+					theWriteTimer = [NSTimer timerWithTimeInterval:theCurrentWrite->timeout
+															target:timerTarget
+														  selector:@selector(timerFired:)
+														  userInfo:nil
+														   repeats:NO];
+					[self runLoopAddTimer:theWriteTimer];
+				}
+				
+				// Immediately write, if possible
+				[self doSendBytes];
+			}
+		}
+		else if(theFlags & kDisconnectAfterWrites)
+		{
+			if(theFlags & kDisconnectAfterReads)
+			{
+				if(([theReadQueue count] == 0) && (theCurrentRead == nil))
+				{
+					[self disconnect];
+				}
+			}
+			else
+			{
+				[self disconnect];
+			}
+		}
+	}
+}
+
+/**
+ * Call this method in doSendBytes instead of CFWriteStreamCanAcceptBytes().
+ * This method supports the kSocketCanAcceptBytes flag.
+**/
+- (BOOL)canAcceptBytes
+{
+	if (theFlags & kSocketCanAcceptBytes)
+	{
+		return YES;
+	}
+	else
+	{
+		return CFWriteStreamCanAcceptBytes(theWriteStream);
+	}
+}
+
+- (void)doSendBytes
+{
+	if ((theCurrentWrite == nil) || (theWriteStream == NULL))
+	{
+		return;
+	}
+	
+	// Note: This method is not called if theCurrentWrite is an AsyncSpecialPacket (startTLS packet)
+	
+	NSUInteger totalBytesWritten = 0;
+	
+	BOOL done = NO;
+	BOOL error = NO;
+	
+	while (!done && !error && [self canAcceptBytes])
+	{
+		// Figure out what to write
+		NSUInteger bytesRemaining = [theCurrentWrite->buffer length] - theCurrentWrite->bytesDone;
+		NSUInteger bytesToWrite = (bytesRemaining < WRITE_CHUNKSIZE) ? bytesRemaining : WRITE_CHUNKSIZE;
+		
+		UInt8 *writestart = (UInt8 *)([theCurrentWrite->buffer bytes] + theCurrentWrite->bytesDone);
+		
+		// Write
+		CFIndex result = CFWriteStreamWrite(theWriteStream, writestart, bytesToWrite);
+		
+		// Unset the "can accept bytes" flag
+		theFlags &= ~kSocketCanAcceptBytes;
+		
+		// Check results
+		if (result < 0)
+		{
+			error = YES;
+		}
+		else
+		{
+			CFIndex bytesWritten = result;
+			
+			// Update total amount read for the current write
+			theCurrentWrite->bytesDone += bytesWritten;
+			
+			// Update total amount written in this method invocation
+			totalBytesWritten += bytesWritten;
+			
+			// Is packet done?
+			done = ([theCurrentWrite->buffer length] == theCurrentWrite->bytesDone);
+		}
+	}
+	
+	if(done)
+	{
+		[self completeCurrentWrite];
+		[self scheduleDequeueWrite];
+	}
+	else if(error)
+	{
+		CFStreamError err = CFWriteStreamGetError(theWriteStream);
+		[self closeWithError:[self errorFromCFStreamError:err]];
+		return;
+	}
+	else if (totalBytesWritten > 0)
+	{
+		// We're not done with the entire write, but we have written some bytes
+		if ([theDelegate respondsToSelector:@selector(onSocket:didWritePartialDataOfLength:tag:)])
+		{
+			[theDelegate onSocket:self didWritePartialDataOfLength:totalBytesWritten tag:theCurrentWrite->tag];
+		}
+	}
+}
+
+// Ends current write and calls delegate.
+- (void)completeCurrentWrite
+{
+	NSAssert(theCurrentWrite, @"Trying to complete current write when there is no current write.");
+	
+	if ([theDelegate respondsToSelector:@selector(onSocket:didWriteDataWithTag:)])
+	{
+		[theDelegate onSocket:self didWriteDataWithTag:theCurrentWrite->tag];
+	}
+	
+	if (theCurrentWrite != nil) [self endCurrentWrite]; // Caller may have disconnected.
+}
+
+// Ends current write.
+- (void)endCurrentWrite
+{
+	NSAssert(theCurrentWrite, @"Trying to end current write when there is no current write.");
+	
+	[theWriteTimer invalidate];
+	theWriteTimer = nil;
+	
+	theCurrentWrite = nil;
+}
+
+- (void)doWriteTimeout:(NSTimer *)timer
+{
+	#pragma unused(timer)
+	
+	NSTimeInterval timeoutExtension = 0.0;
+	
+	if([theDelegate respondsToSelector:@selector(onSocket:shouldTimeoutWriteWithTag:elapsed:bytesDone:)])
+	{
+		timeoutExtension = [theDelegate onSocket:self shouldTimeoutWriteWithTag:theCurrentWrite->tag
+		                                                                elapsed:theCurrentWrite->timeout
+		                                                              bytesDone:theCurrentWrite->bytesDone];
+	}
+	
+	if(timeoutExtension > 0.0)
+	{
+		theCurrentWrite->timeout += timeoutExtension;
+        NexusTalkTimerTarget *timerTarget = [[NexusTalkTimerTarget alloc] init];
+        timerTarget.actualTarget = self;
+		theWriteTimer = [NSTimer timerWithTimeInterval:timeoutExtension
+		                                        target:timerTarget
+		                                      selector:@selector(timerFired:)
+		                                      userInfo:nil
+		                                       repeats:NO];
+		[self runLoopAddTimer:theWriteTimer];
+	}
+	else
+	{
+		[self closeWithError:[self getWriteTimeoutError]];
+	}
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Security
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (void)startTLS:(NSDictionary *)tlsSettings
+{
+#if DEBUG_THREAD_SAFETY
+	[self checkForThreadSafety];
+#endif
+	
+	if(tlsSettings == nil)
+    {
+        // Passing nil/NULL to CFReadStreamSetProperty will appear to work the same as passing an empty dictionary,
+        // but causes problems if we later try to fetch the remote host's certificate.
+        // 
+        // To be exact, it causes the following to return NULL instead of the normal result:
+        // CFReadStreamCopyProperty(readStream, kCFStreamPropertySSLPeerCertificates)
+        // 
+        // So we use an empty dictionary instead, which works perfectly.
+        
+        tlsSettings = [NSDictionary dictionary];
+    }
+	
+	AsyncSpecialPacket *packet = [[AsyncSpecialPacket alloc] initWithTLSSettings:tlsSettings];
+	
+	[theReadQueue addObject:packet];
+	[self scheduleDequeueRead];
+	
+	[theWriteQueue addObject:packet];
+	[self scheduleDequeueWrite];
+	
+}
+
+- (void)maybeStartTLS
+{
+	// We can't start TLS until:
+	// - All queued reads prior to the user calling StartTLS are complete
+	// - All queued writes prior to the user calling StartTLS are complete
+	// 
+	// We'll know these conditions are met when both kStartingReadTLS and kStartingWriteTLS are set
+	
+	if((theFlags & kStartingReadTLS) && (theFlags & kStartingWriteTLS))
+	{
+		AsyncSpecialPacket *tlsPacket = (AsyncSpecialPacket *)theCurrentRead;
+		
+		BOOL didStartOnReadStream = CFReadStreamSetProperty(theReadStream, kCFStreamPropertySSLSettings,
+														   (__bridge CFDictionaryRef)tlsPacket->tlsSettings);
+		BOOL didStartOnWriteStream = CFWriteStreamSetProperty(theWriteStream, kCFStreamPropertySSLSettings,
+															 (__bridge CFDictionaryRef)tlsPacket->tlsSettings);
+		
+		if(!didStartOnReadStream || !didStartOnWriteStream)
+		{
+            [self closeWithError:[self getSocketError]];
+		}
+	}
+}
+
+- (void)onTLSHandshakeSuccessful
+{
+	if((theFlags & kStartingReadTLS) && (theFlags & kStartingWriteTLS))
+	{
+		theFlags &= ~kStartingReadTLS;
+		theFlags &= ~kStartingWriteTLS;
+		
+		if([theDelegate respondsToSelector:@selector(onSocketDidSecure:)])
+		{
+			[theDelegate onSocketDidSecure:self];
+		}
+		
+		[self endCurrentRead];
+		[self endCurrentWrite];
+		
+		[self scheduleDequeueRead];
+		[self scheduleDequeueWrite];
+	}
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark CF Callbacks
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+- (void)doCFSocketCallback:(CFSocketCallBackType)type
+				 forSocket:(CFSocketRef)sock
+			   withAddress:(NSData *)address
+				  withData:(const void *)pData
+{
+	#pragma unused(address)
+	
+	NSParameterAssert ((sock == theSocket4) || (sock == theSocket6));
+	
+	switch (type)
+	{
+		case kCFSocketConnectCallBack:
+			// The data argument is either NULL or a pointer to an SInt32 error code, if the connect failed.
+			if(pData)
+				[self doSocketOpen:sock withCFSocketError:kCFSocketError];
+			else
+				[self doSocketOpen:sock withCFSocketError:kCFSocketSuccess];
+			break;
+		case kCFSocketAcceptCallBack:
+			[self doAcceptFromSocket:sock withNewNativeSocket:*((CFSocketNativeHandle *)pData)];
+			break;
+		default:
+			NLLogNVPError(@"AsyncSocket %p received unexpected CFSocketCallBackType %i", self, (int)type);
+			break;
+	}
+}
+
+- (void)doCFReadStreamCallback:(CFStreamEventType)type forStream:(CFReadStreamRef)stream
+{
+	#pragma unused(stream)
+	
+	NSParameterAssert(theReadStream != NULL);
+	
+	CFStreamError err;
+	switch (type)
+	{
+		case kCFStreamEventOpenCompleted:
+			theFlags |= kDidCompleteOpenForRead;
+			[self doStreamOpen];
+			break;
+		case kCFStreamEventHasBytesAvailable:
+			if(theFlags & kStartingReadTLS) {
+				[self onTLSHandshakeSuccessful];
+			}
+			else {
+				theFlags |= kSocketHasBytesAvailable;
+				[self doBytesAvailable];
+			}
+			break;
+		case kCFStreamEventErrorOccurred:
+		case kCFStreamEventEndEncountered:
+			err = CFReadStreamGetError (theReadStream);
+			[self closeWithError: [self errorFromCFStreamError:err]];
+			break;
+		default:
+			NLLogNVPError(@"AsyncSocket %p received unexpected CFReadStream callback, CFStreamEventType %i", self, (int)type);
+	}
+}
+
+- (void)doCFWriteStreamCallback:(CFStreamEventType)type forStream:(CFWriteStreamRef)stream
+{
+	#pragma unused(stream)
+	
+	NSParameterAssert(theWriteStream != NULL);
+	
+	CFStreamError err;
+	switch (type)
+	{
+		case kCFStreamEventOpenCompleted:
+			theFlags |= kDidCompleteOpenForWrite;
+			[self doStreamOpen];
+			break;
+		case kCFStreamEventCanAcceptBytes:
+			if(theFlags & kStartingWriteTLS) {
+				[self onTLSHandshakeSuccessful];
+			}
+			else {
+				theFlags |= kSocketCanAcceptBytes;
+				[self doSendBytes];
+			}
+			break;
+		case kCFStreamEventErrorOccurred:
+		case kCFStreamEventEndEncountered:
+			err = CFWriteStreamGetError (theWriteStream);
+			[self closeWithError: [self errorFromCFStreamError:err]];
+			break;
+		default:
+			NLLogNVPError(@"AsyncSocket %p received unexpected CFWriteStream callback, CFStreamEventType %i", self, (int)type);
+	}
+}
+
+/**
+ * This is the callback we set up for CFSocket.
+ * This method does nothing but forward the call to its Objective-C counterpart.
+**/
+static void MyCFSocketCallback (CFSocketRef sref, CFSocketCallBackType type, CFDataRef inAddress, const void *pData, void *pInfo)
+{
+	@autoreleasepool {
+	
+		AsyncSocket *theSocket = (__bridge AsyncSocket *)pInfo;
+		NSData *address = [(__bridge NSData *)inAddress copy];
+		
+		[theSocket doCFSocketCallback:type forSocket:sref withAddress:address withData:pData];
+	
+	}
+}
+
+/**
+ * This is the callback we set up for CFReadStream.
+ * This method does nothing but forward the call to its Objective-C counterpart.
+**/
+static void MyCFReadStreamCallback (CFReadStreamRef stream, CFStreamEventType type, void *pInfo)
+{
+	@autoreleasepool {
+	
+		AsyncSocket *theSocket = (__bridge AsyncSocket *)pInfo;
+		[theSocket doCFReadStreamCallback:type forStream:stream];
+	
+	}
+}
+
+/**
+ * This is the callback we set up for CFWriteStream.
+ * This method does nothing but forward the call to its Objective-C counterpart.
+**/
+static void MyCFWriteStreamCallback (CFWriteStreamRef stream, CFStreamEventType type, void *pInfo)
+{
+	@autoreleasepool {
+	
+		AsyncSocket *theSocket = (__bridge AsyncSocket *)pInfo;
+		[theSocket doCFWriteStreamCallback:type forStream:stream];
+	
+	}
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+#pragma mark Class Methods
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Return line separators.
++ (NSData *)CRLFData
+{
+	return [NSData dataWithBytes:"\x0D\x0A" length:2];
+}
+
++ (NSData *)CRData
+{
+	return [NSData dataWithBytes:"\x0D" length:1];
+}
+
++ (NSData *)LFData
+{
+	return [NSData dataWithBytes:"\x0A" length:1];
+}
+
++ (NSData *)ZeroData
+{
+	return [NSData dataWithBytes:"" length:1];
+}
+
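+// For reference, a minimal sketch of how these terminators are typically used
+// with the readDataToData:withTimeout:tag: method declared in this class's
+// header (not part of this hunk); the tag value here is illustrative:
+//
+//   [socket readDataToData:[AsyncSocket CRLFData] withTimeout:-1 tag:0];
+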
+@end
diff --git a/NexusVideoPlayer/AudioPlayer.h b/NexusVideoPlayer/AudioPlayer.h
new file mode 100644
index 0000000..5e41aeb
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayer.h
@@ -0,0 +1,33 @@
+//
+//  AudioPlayer.h
+//  NexusVideoPlayer
+//
+//  Created by Loren Kirkby on 10/6/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "Nexustalk.pb.h"
+
+#import "PlayerBase.h"
+#import "DCAdaptiveJitterBuffer.h"
+
+@interface AudioPlayer : PlayerBase
+
+- (instancetype)initWithSampleRate:(int)sampleRate
+                      packetBuffer:(DCAdaptiveJitterBuffer *)buffer
+                       privateData:(NSData *)privateData;
+
+@property (nonatomic, assign) AudioStreamBasicDescription asbd;
+@property (readonly, nonatomic, strong) NSData *privateData;
+@property (readonly, nonatomic, assign) double lastSampleTime;
+@property (readonly, nonatomic, assign) int64_t lastPTS;
+
+- (void)setVolume:(float)volume;
+
+- (AudioQueueRef)audioQueue;
+- (AudioQueueBufferRef)dequeueUsusedBuffer;
+- (void)enqueueBuffer:(AudioQueueBufferRef)buffer
+             withAspd:(AudioStreamPacketDescription *)aspd
+              withPTS:(int64_t *)pts;
+
+@end
diff --git a/NexusVideoPlayer/AudioPlayer.m b/NexusVideoPlayer/AudioPlayer.m
new file mode 100644
index 0000000..da94d6b
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayer.m
@@ -0,0 +1,218 @@
+//
+//  AudioPlayer.m
+//  NexusVideoPlayer
+//
+//  Created by Loren Kirkby on 10/6/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "AudioPlayer.h"
+#import "NLCommonLoggingNVP.h"
+
+static const NSInteger AudioPlayerBufferSize = 16384;
+static const NSInteger AudioPlayerNumberOfBuffers = 3;
+
+@interface AudioPlayer () {
+  AudioQueueRef _aq;
+}
+
+- (void)onUnusedBuffer:(AudioQueueBufferRef)buffer;
+
+@property (readwrite, nonatomic, strong) NSData *privateData;
+@property (readwrite, nonatomic, assign) double lastSampleTime;
+@property (readwrite, nonatomic, assign) int64_t lastPTS;
+@property (readwrite, nonatomic, strong) NSMutableArray *unusedAudioBuffers;
+@property (nonatomic, strong) NSLock *unusedAudioBuffersLock;
+
+@end
+
+@implementation AudioPlayer
+
++ (void)initAudioSession {
+  AVAudioSession *session = [AVAudioSession sharedInstance];
+    
+  if ([session isInputAvailable]) {
+    NSError *error;
+    BOOL success = [session setCategory:AVAudioSessionCategoryPlayAndRecord
+                            withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker
+                                  error:&error];
+    if (!success) {
+      NLLogNVPWarn(@"NexusVideoPlayer: not able to force audio output to speaker");
+    }
+  } else {
+    [session setCategory:AVAudioSessionCategoryPlayback error:NULL];
+  }
+    
+  [session setActive:YES error:NULL];
+}
+
+static void AudioPlayerAudioQueueOutputCallback(void* inClientData,
+                                                AudioQueueRef inAQ,
+                                                AudioQueueBufferRef inBuffer) {
+  AudioPlayer *player = (__bridge AudioPlayer *)(inClientData);
+  [player onUnusedBuffer:inBuffer];
+}
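+
+// Buffer lifecycle: the subclass's queueNextPackets fills free buffers and
+// hands them to the AudioQueue; when playback drains one, the queue invokes
+// the callback above, which recycles it into unusedAudioBuffers and refills.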
+
+- (instancetype)initWithSampleRate:(int)sampleRate
+                      packetBuffer:(DCAdaptiveJitterBuffer *)buffer
+                       privateData:(NSData *)privateData {
+  if (self = [super initWithTimebase:sampleRate jitterBuffer:buffer]) {
+    [AudioPlayer initAudioSession];
+
+    _unusedAudioBuffers = [[NSMutableArray alloc] init];
+    _unusedAudioBuffersLock = [[NSLock alloc] init];
+    _privateData = privateData;
+    _lastSampleTime = 0.0;
+    _lastPTS = INT64_MIN;
+  }
+  return self;
+}
+
+- (void)dealloc {
+  [self stop];
+}
+
+- (void)setVolume:(float)volume {
+  AudioQueueSetParameter(_aq, kAudioQueueParam_Volume, volume);
+}
+
+- (BOOL)isRunning {
+  return _aq != NULL;
+}
+
+- (void)startAudioQueueWithAsbd:(AudioStreamBasicDescription *)asbd {
+  NSAssert(_aq == NULL, @"Starting audio player when already started.");
+
+  OSStatus err = AudioQueueNewOutput(asbd,
+                                     AudioPlayerAudioQueueOutputCallback,
+                                     (__bridge void *)(self),
+                                     NULL,
+                                     kCFRunLoopCommonModes,
+                                     0,
+                                     &_aq);
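+  // Note: passing NULL for inCallbackRunLoop makes the queue invoke the output
+  // callback on one of its own internal threads; the mode argument only
+  // applies when a specific run loop is supplied.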
+
+  if (err != 0) {
+    NLLogNVPError(@"Error opening audio queue: %ld", (long)err);
+    return;
+  }
+
+  for (NSInteger i = 0; i < AudioPlayerNumberOfBuffers; i++) {
+    AudioQueueBufferRef buf;
+    err = AudioQueueAllocateBuffer(_aq, AudioPlayerBufferSize, &buf);
+    if (err != 0) {
+      NLLogNVPError(@"Error creating audio buffer: %ld", (long)err);
+      return;
+    }
+
+    NSValue *valObj = [NSValue valueWithPointer:buf];
+    [self.unusedAudioBuffersLock lock];
+    [self.unusedAudioBuffers addObject:valObj];
+    [self.unusedAudioBuffersLock unlock];
+  }
+
+  self.lastPTS = INT64_MIN;
+  self.lastSampleTime = 0.0;
+
+  // Queue up initial buffers if possible
+  [self queueNextPackets];
+
+  err = AudioQueueStart(_aq, NULL);
+  if (err != 0)  {
+    NLLogNVPError(@"Error starting audio queue: %ld", (long)err);
+    return;
+  }
+}
+
+- (void)stopAudioQueue {
+  if (_aq != NULL) {
+    AudioQueueStop(_aq, true);
+    AudioQueueDispose(_aq, true);
+  }
+  _aq = NULL;
+  [self.unusedAudioBuffersLock lock];
+  [self.unusedAudioBuffers removeAllObjects];
+  [self.unusedAudioBuffersLock unlock];
+}
+
+- (void)start {
+  [self startAudioQueueWithAsbd:&_asbd];
+}
+
+- (void)stop {
+  [self stopAudioQueue];
+}
+
+- (AudioQueueRef)audioQueue {
+  return _aq;
+}
+
+// Only call from audio thread
+- (AudioQueueBufferRef)dequeueUsusedBuffer {
+  AudioQueueBufferRef buffer = NULL;
+  [self.unusedAudioBuffersLock lock];
+  if ([self.unusedAudioBuffers count] > 0) {
+    NSValue *bufferVal = [self.unusedAudioBuffers lastObject];
+    [self.unusedAudioBuffers removeLastObject];
+    buffer = [bufferVal pointerValue];
+  }
+  [self.unusedAudioBuffersLock unlock];
+  return buffer;
+}
+
+- (void)enqueueBuffer:(AudioQueueBufferRef)buffer
+             withAspd:(AudioStreamPacketDescription *)aspd
+              withPTS:(int64_t *)pts {
+  AudioTimeStamp outTime = {0};
+  outTime.mFlags = kAudioTimeStampSampleTimeValid;
+
+  int numAspds = (aspd != NULL) ? 1 : 0;
+
+  if (_aq == NULL) return;
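+  // All trim-frame counts and parameter values are zero/NULL here; outTime
+  // receives the queue's projected start time (in sample time) for the buffer.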
+  OSStatus queueResult =
+  AudioQueueEnqueueBufferWithParameters(_aq, buffer, numAspds, aspd,
+                                        0, 0, 0, NULL, NULL, &outTime);
+
+  if (queueResult != 0) {
+    NLLogNVPError(@"Error queuing audio buffer: %d", (int)queueResult);
+  }
+
+  if (pts != NULL) {
+    // Update lastSampleTime and lastPTS (for accurate currentTimestamp)
+    self.lastSampleTime = outTime.mSampleTime;
+    self.lastPTS = *pts;
+  }
+}
+
+- (void)onUnusedBuffer:(AudioQueueBufferRef)buffer {
+  NSValue *bufferVal = [NSValue valueWithPointer:buffer];
+  [self.unusedAudioBuffersLock lock];
+  [self.unusedAudioBuffers addObject:bufferVal];
+  [self.unusedAudioBuffersLock unlock];
+  [self queueNextPackets];
+}
+
+- (void)queueNextPackets {
+  // Fill unused buffers and queue if possible
+  // Should be implemented in subclasses
+  [self doesNotRecognizeSelector:_cmd];
+}
+
+- (int64_t)currentTimestamp {
+  AudioTimeStamp timestamp = {0};
+  if (self.audioQueue == NULL) {
+    return 0;
+  }
+
+  OSStatus res = AudioQueueGetCurrentTime(self.audioQueue, NULL, &timestamp, NULL);
+  if (res != 0) {
+    NLLogNVPError(@"Couldn't get AudioQueue time! (%d)", (int)res);
+    return self.lastPTS;
+  }
+
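+  // The player's timebase equals the audio sample rate, so an offset measured
+  // in samples converts directly into PTS ticks.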
+  double offset = timestamp.mSampleTime - self.lastSampleTime;
+  return self.lastPTS + (int64_t)offset;
+}
+
+@end
diff --git a/NexusVideoPlayer/AudioPlayerAAC.h b/NexusVideoPlayer/AudioPlayerAAC.h
new file mode 100644
index 0000000..77c76cb
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayerAAC.h
@@ -0,0 +1,13 @@
+//
+//  AudioPlayerAAC.h
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/10/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "AudioPlayer.h"
+
+@interface AudioPlayerAAC : AudioPlayer
+
+@end
diff --git a/NexusVideoPlayer/AudioPlayerAAC.m b/NexusVideoPlayer/AudioPlayerAAC.m
new file mode 100644
index 0000000..bffdba9
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayerAAC.m
@@ -0,0 +1,102 @@
+//
+//  AudioPlayerAAC.m
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/10/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "AudioPlayerAAC.h"
+#import "AAC.h"
+#import "DebugLog.h"
+#import "NLCommonLoggingNVP.h"
+
+#include "JitterTunables.h"
+
+@interface AudioPlayerAAC()
+
+@property (nonatomic, strong) NSThread *bufferWaiter;
+
+@end
+
+@implementation AudioPlayerAAC
+
+- (instancetype)initWithSampleRate:(int)sampleRate
+                      packetBuffer:(DCAdaptiveJitterBuffer *)buffer
+                       privateData:(NSData *)privateData {
+  self = [super initWithSampleRate:sampleRate packetBuffer:buffer privateData:privateData];
+  if (self) {
+    struct AACAudioSpecificConfig aacConfig;
+    if (!DecodeAACAudioSpecificConfig(privateData, &aacConfig)) {
+      NLLogNVPWarn(@"Failed to decode AAC AudioSpecificConfig");
+      return nil;
+    }
+
+    AudioStreamBasicDescription asbd = {0};
+    asbd.mSampleRate = aacConfig.frequency;
+    if (aacConfig.frequency != sampleRate) {
+      NLLogNVPWarn(@"AAC sample rate discrepancy! (%d != %d)", aacConfig.frequency, sampleRate);
+      return nil;
+    }
+    asbd.mFormatID = kAudioFormatMPEG4AAC;
+    asbd.mFormatFlags = 0;
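+    // frameLength is the AAC frame size in PCM samples per packet (typically
+    // 1024 for AAC-LC), which maps directly onto mFramesPerPacket.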
+    asbd.mFramesPerPacket = aacConfig.frameLength;
+    asbd.mChannelsPerFrame = aacConfig.channelConfig;
+    self.asbd = asbd;
+  }
+
+  return self;
+}
+
+- (void)start {
+  [super start];
+
+  self.bufferWaiter = [[NSThread alloc] initWithTarget:self
+                                              selector:@selector(bufferWaiterThreadEntry)
+                                                object:nil];
+  [self.bufferWaiter start];
+}
+
+- (void)stop {
+  // Cancel the waiter thread before waking it, so the wakeup observes the
+  // cancel flag and the thread can exit.
+  [self.bufferWaiter cancel];
+  [self.jitterBuffer cancelWaiters];
+
+  [super stop];
+}
+
+- (void)queueNextPackets {
+  while (self.jitterBuffer.numPackets > 0) {
+    AudioQueueBufferRef buffer = [self dequeueUsusedBuffer];
+    if (buffer == NULL) {
+      break;
+    }
+
+    Packet *packet = [self.jitterBuffer nextPacket];
+    if (packet == nil) {
+      NLLogNVPWarn(@"Jitter buffer returned NULL when not empty!");
+      break;
+    }
+
+    memcpy(buffer->mAudioData, [packet.data bytes], [packet.data length]);
+
+    AudioStreamPacketDescription aspd = {0};
+    aspd.mDataByteSize = (UInt32)[packet.data length];
+    buffer->mAudioDataByteSize = aspd.mDataByteSize;
+
+    int64_t pts = packet.PTS;
+    [self enqueueBuffer:buffer withAspd:&aspd withPTS:&pts];
+  }
+}
+
+- (void)bufferWaiterThreadEntry {
+  NSThread *thread = [NSThread currentThread];
+  while (![thread isCancelled]) {
+    if (![self.jitterBuffer waitForPacket]) continue;
+
+    [self performSelectorOnMainThread:@selector(queueNextPackets)
+                           withObject:nil
+                        waitUntilDone:YES];
+  }
+}
+
+@end
diff --git a/NexusVideoPlayer/AudioPlayerOpus.h b/NexusVideoPlayer/AudioPlayerOpus.h
new file mode 100644
index 0000000..2e736cf
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayerOpus.h
@@ -0,0 +1,10 @@
+#import "AudioPlayer.h"
+
+/**
+ AudioPlayerOpus is a concrete implementation of AudioPlayer that decodes audio with the Opus codec.
+ 
+ @see https://www.opus-codec.org/ for more information about Opus.
+ */
+@interface AudioPlayerOpus : AudioPlayer
+
+@end
diff --git a/NexusVideoPlayer/AudioPlayerOpus.m b/NexusVideoPlayer/AudioPlayerOpus.m
new file mode 100644
index 0000000..9c38ea7
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayerOpus.m
@@ -0,0 +1,114 @@
+#import <libopus/opus.h>
+
+#import "DebugLog.h"
+#import "NLCommonLoggingNVP.h"
+
+#import "AudioPlayerOpus.h"
+
+static NSInteger const AudioPlayerOpusNumberOfChannels = 1;
+static NSInteger const AudioPlayerOpusNumberOfBytesPerSample = 2;
+
+@interface AudioPlayerOpus()
+
+@property (nonatomic, assign) OpusDecoder *decoder;
+@property (nonatomic, strong) NSThread *bufferWaiter;
+
+@end
+
+@implementation AudioPlayerOpus
+
+- (instancetype)initWithSampleRate:(int)sampleRate
+                      packetBuffer:(DCAdaptiveJitterBuffer *)buffer
+                       privateData:(NSData *)privateData {
+  self = [super initWithSampleRate:sampleRate packetBuffer:buffer privateData:privateData];
+  if (self != nil) {
+    int error;
+    _decoder = opus_decoder_create(sampleRate, AudioPlayerOpusNumberOfChannels, &error);
+
+    if(error != OPUS_OK) {
+      NLLogNVPError(@"Error creating Opus audio decoder: %s", opus_strerror(error));
+      return nil;
+    }
+
+    AudioStreamBasicDescription asbd = {0};
+    asbd.mSampleRate = sampleRate;
+    asbd.mFormatID = kAudioFormatLinearPCM;
+    asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
+    asbd.mBitsPerChannel = 16;
+    asbd.mFramesPerPacket = 1;
+    asbd.mChannelsPerFrame = 1;
+    asbd.mBytesPerFrame = 2;
+    asbd.mBytesPerPacket = 2;
+
+    self.asbd = asbd;
+  }
+
+  return self;
+}
+
+- (void)dealloc {
+  opus_decoder_destroy(_decoder);
+}
+
+- (void)start {
+  [super start];
+
+  self.bufferWaiter = [[NSThread alloc] initWithTarget:self
+                                              selector:@selector(bufferWaiterThreadEntry)
+                                                object:nil];
+  [self.bufferWaiter start];
+}
+
+- (void)stop {
+  // Cancel the waiter thread before waking it, so the wakeup observes the
+  // cancel flag and the thread can exit.
+  [self.bufferWaiter cancel];
+  [self.jitterBuffer cancelWaiters];
+
+  [super stop];
+}
+
+- (void)queueNextPackets {
+  while (self.jitterBuffer.numPackets > 0) {
+    AudioQueueBufferRef buffer = [self dequeueUsusedBuffer];
+    if (buffer == NULL) {
+      break;
+    }
+
+    Packet *packet = [self.jitterBuffer nextPacket];
+    if (packet == nil) {
+      NLLogNVPWarn(@"Jitter buffer returned NULL when not empty!");
+      break;
+    }
+
+    int bufferFrameSize = buffer->mAudioDataBytesCapacity / AudioPlayerOpusNumberOfBytesPerSample;
+
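+    // opus_decode emits 16-bit PCM; frame_size is the output capacity in
+    // samples per channel, and the final argument (0) disables in-band
+    // forward error correction decoding.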
+    int numDecodedSamples = opus_decode(self.decoder,
+                                        packet.data.bytes,
+                                        (opus_int32)packet.data.length,
+                                        buffer->mAudioData,
+                                        bufferFrameSize,
+                                        0);
+
+    if(numDecodedSamples < 0) {
+      NLLogNVPError(@"Opus decoder encountered an error: %s", opus_strerror(numDecodedSamples));
+      break;
+    }
+
+    buffer->mAudioDataByteSize = numDecodedSamples * AudioPlayerOpusNumberOfBytesPerSample;
+
+    int64_t pts = packet.PTS;
+    [self enqueueBuffer:buffer withAspd:NULL withPTS:&pts];
+  }
+}
+
+- (void)bufferWaiterThreadEntry {
+  NSThread *thread = [NSThread currentThread];
+  while (![thread isCancelled]) {
+    if (![self.jitterBuffer waitForPacket]) continue;
+
+    [self performSelectorOnMainThread:@selector(queueNextPackets)
+                           withObject:nil
+                        waitUntilDone:YES];
+  }
+}
+
+@end
diff --git a/NexusVideoPlayer/AudioPlayerSpeex.h b/NexusVideoPlayer/AudioPlayerSpeex.h
new file mode 100644
index 0000000..560775f
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayerSpeex.h
@@ -0,0 +1,13 @@
+//
+//  AudioPlayerSpeex.h
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/10/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "AudioPlayer.h"
+
+@interface AudioPlayerSpeex : AudioPlayer
+
+@end
diff --git a/NexusVideoPlayer/AudioPlayerSpeex.mm b/NexusVideoPlayer/AudioPlayerSpeex.mm
new file mode 100644
index 0000000..668e1e0
--- /dev/null
+++ b/NexusVideoPlayer/AudioPlayerSpeex.mm
@@ -0,0 +1,253 @@
+//
+//  AudioPlayerSpeex.mm
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/10/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "AudioPlayerSpeex.h"
+#import "DebugLog.h"
+#import "NLCommonLoggingNVP.h"
+
+#include <vector>
+#include <algorithm>
+
+#import <speex/speex.h>
+
+#include "SkipInterpCounter.hpp"
+#include "JitterTunables.h"
+
+@interface AudioPlayerSpeex() {
+  SpeexBits bits;
+  void *dec_state;
+  
+  // C++ classes don't work well as properties
+  SkipInterpCounter _skipInterpCounter;
+  std::vector<int16_t> _skipFrameBuffer;
+  std::vector<int16_t> _ratioLookup16;
+}
+
+@property (nonatomic, assign) BOOL hasSoundActivity;
+@property (nonatomic, assign) int frameSizeSamples;
+
+@end
+
+@implementation AudioPlayerSpeex
+
+- (instancetype)initWithSampleRate:(int)sampleRate
+                      packetBuffer:(DCAdaptiveJitterBuffer *)buffer
+                       privateData:(NSData *)privateData {
+  self = [super initWithSampleRate:sampleRate packetBuffer:buffer privateData:privateData];
+  if (self) {
+    speex_bits_init(&bits);
+
+    if (sampleRate == 16000) {
+      dec_state = speex_decoder_init(&speex_wb_mode);
+    } else {
+      dec_state = speex_decoder_init(&speex_nb_mode);
+    }
+
+    int enh = 1;
+    speex_decoder_ctl(dec_state, SPEEX_SET_ENH, &enh);
+    int frame_size;
+    speex_decoder_ctl(dec_state, SPEEX_GET_FRAME_SIZE, &frame_size);
+    _frameSizeSamples = frame_size;
+
+    _hasSoundActivity = YES;
+    _skipFrameBuffer.resize(_frameSizeSamples);
+
+    // Make the ratio lookup table
+    _ratioLookup16.resize(_frameSizeSamples);
+    for (int i = 0; i < _frameSizeSamples; i++) {
+      float ratio = (float)i / (_frameSizeSamples - 1);
+      _ratioLookup16[i] = ratio * INT16_MAX;
+    }
+
+    AudioStreamBasicDescription asbd = {0};
+    asbd.mSampleRate = sampleRate;
+    asbd.mFormatID = kAudioFormatLinearPCM;
+    asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
+    asbd.mBitsPerChannel = 16;
+    asbd.mFramesPerPacket = 1;
+    asbd.mChannelsPerFrame = 1;
+    asbd.mBytesPerFrame = 2;
+    asbd.mBytesPerPacket = 2;
+
+    self.asbd = asbd;
+  }
+
+  return self;
+}
+
+- (void)dealloc {
+  if (dec_state) {
+    speex_bits_destroy(&bits);
+    speex_decoder_destroy(dec_state);
+  }
+}
+
+- (BOOL)tryDecodeBitsInto:(int16_t*)buffer {
+  if (speex_bits_remaining(&bits) > 0) {
+    int res = speex_decode_int(dec_state, &bits, (spx_int16_t*)buffer);
+    if (res == -1) {
+      // Not enough bits left to decode
+      return NO;
+    }
+    else if (res < 0) {
+      NLLogNVPError(@"Speex error: %d", res);
+      return NO;
+    }
+    else {
+      // Is there sound activity right now?
+      spx_int32_t activity = 0;
+      speex_decoder_ctl(dec_state, SPEEX_GET_ACTIVITY, &activity);
+      if (activity < 30) {
+        self.hasSoundActivity = NO;
+      }
+      else {
+        self.hasSoundActivity = YES;
+      }
+      
+      return YES;
+    }
+  }
+  return NO;
+}
+
+- (BOOL)interpInto:(AudioQueueBufferRef)buffer {
+  int res = speex_decode_int(dec_state, NULL, (spx_int16_t*)buffer->mAudioData);
+  if (res < 0) {
+    return NO;
+  }
+  else {
+    buffer->mAudioDataByteSize = _frameSizeSamples * sizeof(int16_t);
+    return YES;
+  }
+}
+
+- (BOOL)loadNextSamplesInto:(int16_t*)buffer withUpdatePTS:(int64_t*)newPTS {
+  BOOL decoded = [self tryDecodeBitsInto:buffer];
+  if (decoded) {
+    return YES;
+  }
+  
+  // Try to get more data from buffer
+  int64_t PTS = [self.jitterBuffer peekPTS];
+  if (PTS != INT64_MIN) {
+    BOOL firstPacket = (self.lastPTS == INT64_MIN);
+    
+    // Check the timing of this packet to see if we should play it right now
+    if (PTS < self.lastPTS) {
+      // Too late, skip packet
+      [self.jitterBuffer nextPacket];
+    }
+    else if (!firstPacket && (PTS - self.currentTimestamp > 8 * _frameSizeSamples)) {
+      // Too early, wait a bit
+      NLLogNVPWarn(@"Audio packet in future: PTS = %lld, last PTS = %lld", PTS, self.lastPTS);
+    }
+    else {
+      Packet *packet = [self.jitterBuffer nextPacket];
+      float playSpeed = [self.jitterBuffer playSpeedForPTS:packet.PTS];
+      if (self.hasSoundActivity) {
+        playSpeed = std::max(AJB_ACTIVITY_MIN_PLAY_SPEED, playSpeed);
+        playSpeed = std::min(AJB_ACTIVITY_MAX_PLAY_SPEED, playSpeed);
+      }
+      _skipInterpCounter.setPlaybackSpeed(playSpeed);
+      
+      speex_bits_read_from(&bits, (char*)[packet.data bytes], [packet.data length]);
+      
+      *newPTS = packet.PTS;
+      
+      decoded = [self tryDecodeBitsInto:buffer];
+    }
+  }
+  
+  return decoded;
+}
+
+- (void)fillBuffer:(AudioQueueBufferRef)buffer {
+  const int numBytes = self.frameSizeSamples * sizeof(int16_t);
+  BOOL shouldSkip = _skipInterpCounter.shouldSkip(),
+       shouldInterp = _skipInterpCounter.shouldInterp();
+  
+  int64_t updatedPTS = INT64_MIN;
+  BOOL result = NO;
+  if (shouldInterp) {
+    result = [self interpInto:buffer];
+  }
+  else {
+    int16_t *audioBuf = (int16_t*)buffer->mAudioData;
+    result = [self loadNextSamplesInto:audioBuf withUpdatePTS:&updatedPTS];
+    if (shouldSkip) {
+      // Do a crossfade-type thing here with the last frame
+      // To avoid popping artifacts
+      result = [self loadNextSamplesInto:&_skipFrameBuffer[0] withUpdatePTS:&updatedPTS];
+      
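+      // Linear crossfade in 16-bit fixed point: out = orig*(1 - r) + skip*r,
+      // where r ramps from 0 to 1 across the frame via _ratioLookup16.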
+      for (int i = 0; i < self.frameSizeSamples; i++) {
+        int16_t skipBufRatio = _ratioLookup16[i];
+        int16_t origBufRatio = INT16_MAX - skipBufRatio;
+        
+        int32_t origVal = (audioBuf[i] * origBufRatio) / INT16_MAX;
+        int32_t skipVal = (_skipFrameBuffer[i] * skipBufRatio) / INT16_MAX;
+        audioBuf[i] = origVal + skipVal;
+      }
+    }
+    
+    if (result) {
+      buffer->mAudioDataByteSize = numBytes;
+    }
+    else {
+      result = [self interpInto:buffer];
+    }
+  }
+  
+  _skipInterpCounter.tick();
+  
+  if (!result) {
+    // Fill with silence
+    NLLogNVPWarn(@"Speex couldn't fill or interp; writing silence");
+    memset(buffer->mAudioData, 0, numBytes);
+    buffer->mAudioDataByteSize = numBytes;
+  }
+  
+  if (updatedPTS != INT64_MIN) {
+    [self enqueueBuffer:buffer withAspd:NULL withPTS:&updatedPTS];
+  } else {
+    [self enqueueBuffer:buffer withAspd:NULL withPTS:NULL];
+  }
+}
+
+- (void)queueNextPackets {
+  while (YES) {
+    AudioQueueBufferRef buf = [self dequeueUsusedBuffer];
+    if (buf == NULL) break;
+
+    if (buf->mAudioDataBytesCapacity < self.frameSizeSamples * sizeof(int16_t)) {
+      NLLogNVPError(@"Error filling audio buffer: frame size is %d, buffer only %u bytes",
+            self.frameSizeSamples,
+            (unsigned int)buf->mAudioDataBytesCapacity);
+      break;
+    }
+
+    [self fillBuffer:buf];
+  }
+}
+
+- (int64_t)currentTimestamp {
+  AudioTimeStamp timestamp = {0};
+  if (self.audioQueue == NULL) {
+    return 0;
+  }
+
+  OSStatus res = AudioQueueGetCurrentTime(self.audioQueue, NULL, &timestamp, NULL);
+  if (res != 0) {
+    NLLogNVPError(@"Couldn't get AudioQueue time! (%d)", (int)res);
+    return self.lastPTS;
+  }
+
+  double offset = timestamp.mSampleTime - self.lastSampleTime;
+  return self.lastPTS + (int64_t)(offset * _skipInterpCounter.getPlaybackSpeed());
+}
+
+@end
diff --git a/NexusVideoPlayer/AudioRecordStreamer.h b/NexusVideoPlayer/AudioRecordStreamer.h
new file mode 100644
index 0000000..ed9f372
--- /dev/null
+++ b/NexusVideoPlayer/AudioRecordStreamer.h
@@ -0,0 +1,24 @@
+//
+//  AudioRecordStreamer.h
+//  Dropcam
+//
+//  Created by Loren Kirkby on 8/17/11.
+//  Copyright 2011 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+#import "NexusTalkConnection.h"
+#import "SpeexStream.h"
+
+#define NUM_RECORD_BUFFERS 5
+
+@interface AudioRecordStreamer : NSObject
+
+- (id)initWithConnection:(NexusTalkConnection*)connection;
+- (void)start;
+- (void)stop;
+- (float)level;
+- (BOOL)isRunning;
+
+@end
\ No newline at end of file
diff --git a/NexusVideoPlayer/AudioRecordStreamer.m b/NexusVideoPlayer/AudioRecordStreamer.m
new file mode 100644
index 0000000..a2b5998
--- /dev/null
+++ b/NexusVideoPlayer/AudioRecordStreamer.m
@@ -0,0 +1,84 @@
+//
+//  AudioRecordStreamer.m
+//  Dropcam
+//
+//  Created by Loren Kirkby on 8/17/11.
+//  Copyright 2011 Dropcam. All rights reserved.
+//
+
+#import "AudioRecordStreamer.h"
+
+static const int sample_rate = 16000;
+
+@interface AudioRecordStreamer () <SpeexStreamDelegate>
+
+@property (strong, nonatomic) SpeexStream *speexStream;
+@property (strong, nonatomic) NexusTalkConnection *conn;
+@property (assign, nonatomic) BOOL sentFirstPayload;
+@property (assign, nonatomic) uint32_t sessionId;
+
+@end
+
+@implementation AudioRecordStreamer
+
+- (id)initWithConnection:(NexusTalkConnection *)conn
+{
+  self = [super init];
+  if (self) {
+    _conn = conn;
+    _speexStream = [SpeexStream new];
+  }
+  
+  return self;
+}
+
+- (void)dealloc {
+  [self stop];
+}
+
+- (BOOL)isRunning {
+  return [self.speexStream isRunning];
+}
+
+- (void)start {
+  [self stop];
+  
+  self.sentFirstPayload = NO;
+  self.sessionId = rand();
+  
+  self.speexStream.delegate = self;
+  [self.speexStream setup];
+  [self.speexStream start];
+}
+
+- (void)stop {
+  if ([self isRunning]) {
+    [self.speexStream stop];
+    self.speexStream.delegate = nil;
+    // send an empty payload to mark the end of an audio session
+    [self.conn sendAudioPayload:[NSData data] sessionId:self.sessionId codecType:nil sampleRate:nil];
+  }
+}
+
+- (float)level {
+  return [self.speexStream getLevel];
+}
+
+#pragma mark - SpeexStreamDelegate Methods
+
+- (void)speexEncodedFrame:(char *)frame length:(size_t)length inStartTime:(const AudioTimeStamp *)inStartTime
+{
+  NSData *payload = [NSData dataWithBytes:frame length:length];
+  NSNumber *codecType = nil;
+  NSNumber *sampleRate = nil;
+  
+  if (!self.sentFirstPayload) {
+    codecType = [NSNumber numberWithInt:CodecTypeSpeex];
+    sampleRate = [NSNumber numberWithInt:sample_rate];
+    self.sentFirstPayload = YES;
+  }
+  
+  [self.conn sendAudioPayload:payload sessionId:self.sessionId codecType:codecType sampleRate:sampleRate];
+}
+
+@end
\ No newline at end of file
diff --git a/NexusVideoPlayer/AudioStream.h b/NexusVideoPlayer/AudioStream.h
new file mode 100644
index 0000000..9c782f4
--- /dev/null
+++ b/NexusVideoPlayer/AudioStream.h
@@ -0,0 +1,36 @@
+//
+//  AudioStream.h
+//  Dropcam
+//
+//  Created by Yohannes Kifle on 25/06/2013.
+//
+//
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+
+@protocol AudioStreamDelegate;
+
+@interface AudioStream : NSObject
+
+@property (weak, nonatomic) id<AudioStreamDelegate> delegate;
+@property (assign, nonatomic) int bufferSize;
+@property (assign, nonatomic) Float64 sampleRate;
+@property (assign, nonatomic) BOOL isRunning;
+
+- (id)initWithSampleRate:(Float64)sampleRate;
+- (BOOL)setup;
+- (BOOL)start;
+- (void)stop;
+- (float)getLevel;
+
+@end
+
+@protocol AudioStreamDelegate <NSObject>
+
+- (void)audioQueueInputCallbackInBuffer:(AudioQueueBufferRef)inBuffer
+                    inStartTime:(const AudioTimeStamp *)inStartTime
+     inNumberPacketDescriptions:(UInt32)inNumberPacketDescriptions
+                  inPacketDescs:(const AudioStreamPacketDescription *)inPacketDescs;
+
+@end
diff --git a/NexusVideoPlayer/AudioStream.m b/NexusVideoPlayer/AudioStream.m
new file mode 100644
index 0000000..f51cd9c
--- /dev/null
+++ b/NexusVideoPlayer/AudioStream.m
@@ -0,0 +1,143 @@
+//
+//  AudioStream.m
+//  Dropcam
+//
+//  Created by Yohannes Kifle on 25/06/2013.
+//
+//
+
+#import "AudioStream.h"
+#import "NLCommonLoggingNVP.h"
+
+static const int num_record_buffers = 5;
+
+@interface AudioStream () {
+  AudioQueueRef aq;
+  AudioQueueBufferRef	buffers[num_record_buffers];
+}
+
+- (void)audioQueueInputCallback:(AudioQueueRef)inAQ
+                       inBuffer:(AudioQueueBufferRef)inBuffer
+                    inStartTime:(const AudioTimeStamp *)inStartTime
+     inNumberPacketDescriptions:(UInt32)inNumberPacketDescriptions
+                  inPacketDescs:(const AudioStreamPacketDescription *)inPacketDescs;
+
+@end
+
+static void ASCAudioQueueInputCallback (void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer,
+                                        const AudioTimeStamp *inStartTime, UInt32 inNumberPacketDescriptions,
+                                        const AudioStreamPacketDescription *inPacketDescs)
+{
+  AudioStream *stream = (__bridge AudioStream *)(inUserData);
+  [stream audioQueueInputCallback:inAQ inBuffer:inBuffer inStartTime:inStartTime inNumberPacketDescriptions:inNumberPacketDescriptions inPacketDescs:inPacketDescs];
+}
+
+@implementation AudioStream
+
+- (id)initWithSampleRate:(Float64)sampleRate {
+  if (self = [super init]) {
+    _sampleRate = sampleRate;
+    _isRunning = NO;
+  }
+  return self;
+}
+
+- (BOOL)setup {
+  AudioStreamBasicDescription asbd = {
+    .mSampleRate = self.sampleRate,
+    .mFormatID = kAudioFormatLinearPCM,
+    .mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked,
+    .mBitsPerChannel = 16,
+    .mFramesPerPacket = 1,
+    .mChannelsPerFrame = 1,
+    .mBytesPerFrame = 2,
+    .mBytesPerPacket = 2
+  };
+  
+  OSStatus err = AudioQueueNewInput(&asbd, ASCAudioQueueInputCallback, (__bridge void *)(self), CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &aq);
+  if (err != 0) {
+    NLLogNVPError(@"Error creating audio input queue: %ld", (long)err);
+    return NO;
+  }
+  
+  UInt32 enabledLevelMeter = true;
+  AudioQueueSetProperty(aq, kAudioQueueProperty_EnableLevelMetering, &enabledLevelMeter, sizeof(UInt32));
+  
+  for (int i = 0; i < num_record_buffers; i++) {
+    NLLogNVPInfo(@"Creating buffer of size: %d", self.bufferSize);
+    err = AudioQueueAllocateBuffer(aq, self.bufferSize, &buffers[i]);
+    if (err != 0) {
+      NLLogNVPError(@"Error creating audio buffer: %ld", (long)err);
+    }
+    
+    err = AudioQueueEnqueueBuffer(aq, buffers[i], 0, NULL);
+    if (err != 0) {
+      NLLogNVPError(@"AudioQueueEnqueueBuffer failed: %ld", (long)err);
+    }
+  }
+  
+  return err == 0;
+}
+
+- (BOOL)start {
+  if (aq == NULL) {
+    NLLogNVPWarn(@"No audio queue has been set up. Call setup before start.");
+    return NO;
+  }
+  OSStatus err = AudioQueueStart(aq, NULL);
+  if (err != 0) {
+    NLLogNVPError(@"AudioQueueStart failed: %ld", (long)err);
+  }
+  self.isRunning = err == 0;
+  
+  return err == 0;
+}
+
+- (void)dealloc {
+  if(aq) {
+    AudioQueueStop(aq, true);
+    AudioQueueDispose(aq, true);
+  }
+}
+
+- (void)stop {
+  if (self.isRunning) {
+    AudioQueueStop(aq, true);
+  }
+  
+  AudioQueueDispose(aq, true);
+  aq = NULL;
+  self.isRunning = NO;
+}
+
+- (float)getLevel {
+  AudioQueueLevelMeterState levelMeter;
+  UInt32 levelMeterSize = sizeof(AudioQueueLevelMeterState);
+  AudioQueueGetProperty(aq,
+                        (AudioQueuePropertyID)kAudioQueueProperty_CurrentLevelMeterDB,
+                        &levelMeter,
+                        &levelMeterSize);
+  
+  NLLogNVPInfo(@"L: peak=%f, avg=%f", levelMeter.mPeakPower, levelMeter.mAveragePower);
+  //NLLogNVPInfo(@"R: peak=%f, avg=%f", levelMeter[1].mPeakPower, levelMeter[1].mAveragePower);
+  
+  return levelMeter.mAveragePower;
+}
+
+- (void)audioQueueInputCallback:(AudioQueueRef)inAQ
+                       inBuffer:(AudioQueueBufferRef)inBuffer
+                    inStartTime:(const AudioTimeStamp *)inStartTime
+     inNumberPacketDescriptions:(UInt32)inNumberPacketDescriptions
+                  inPacketDescs:(const AudioStreamPacketDescription *)inPacketDescs
+{
+  AudioQueueBuffer bufferCopy = {0};
+  AudioQueueBufferRef bufferCopyRef = &bufferCopy;
+  memcpy(bufferCopyRef, inBuffer, sizeof(AudioQueueBuffer));
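+  // Note: the struct copy still points at inBuffer's mAudioData; the delegate
+  // call below is expected to consume that data before the re-enqueued buffer
+  // is refilled by the audio queue.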
+  // requeue the buffer so it gets filled again
+  AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
+  if ([self.delegate respondsToSelector:@selector(audioQueueInputCallbackInBuffer:inStartTime:inNumberPacketDescriptions:inPacketDescs:)]) {
+    [self.delegate audioQueueInputCallbackInBuffer:bufferCopyRef inStartTime:inStartTime inNumberPacketDescriptions:inNumberPacketDescriptions inPacketDescs:inPacketDescs];
+  }
+}
+
+@end
diff --git a/NexusVideoPlayer/BitParser.h b/NexusVideoPlayer/BitParser.h
new file mode 100644
index 0000000..8e7d183
--- /dev/null
+++ b/NexusVideoPlayer/BitParser.h
@@ -0,0 +1,33 @@
+//
+//  BitParser.h
+//  Dropcam
+//
+//  Created by Loren Kirkby on 1/22/10.
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+// Simple interface for reading bits out of a bitstream.
+// (Assumes bits are packed MSB first)
+//
+// Example:
+//    Given the bytes: 00010010 00010000
+//
+//      [parser readBits:5 into:&val];
+//    will read the first 5 bits into 'val' (00010 -> 2) 
+//
+//      [parser readBits:4 into:&val];
+//    will read the next 4 bits into 'val' (0100 -> 4) 
+//
+@interface BitParser : NSObject {
+  NSData *bytes;
+  int bitPos;
+}
+
++ (id)parserWithData:(NSData*)data;
+- (id)initWithData:(NSData*)data;
+- (BOOL)readBits:(int)numBits into:(uint32_t*)numVal;
+- (void)skipBits:(int)numBits;
+
+@end
diff --git a/NexusVideoPlayer/BitParser.m b/NexusVideoPlayer/BitParser.m
new file mode 100644
index 0000000..ccfb503
--- /dev/null
+++ b/NexusVideoPlayer/BitParser.m
@@ -0,0 +1,55 @@
+//
+//  BitParser.m
+//  Dropcam
+//
+//  Created by Loren Kirkby on 1/22/10.
+//  Copyright 2010 Dropcam. All rights reserved.
+//
+
+#import "BitParser.h"
+
+
+@implementation BitParser
+
++ (id)parserWithData:(NSData*)data {
+  return [[BitParser alloc] initWithData:data];
+}
+
+- (id)initWithData:(NSData*)data {
+  if ((self = [super init])) {
+    bytes = data;
+  }
+  
+  return self;
+}
+
+- (BOOL)readBits:(int)numBits into:(uint32_t*)numVal {
+  if (numBits > 32)
+    return NO;
+  
+  int val = 0;
+  for (int i = 0; i < numBits; i++) {
+    int bytePos = bitPos >> 3;
+    int offset = bitPos & 7;
+    
+    if (bytePos == [bytes length])
+      return NO;
+    
+    uint8_t byte;
+    NSRange range = { bytePos, 1 };
+    [bytes getBytes:&byte range:range];
+    if (byte & (1 << (7 - offset)))
+      val |= (1 << (numBits - i - 1));
+    
+    bitPos++;
+  }
+  
+  *numVal = val;
+  return YES;
+}
+
+- (void)skipBits:(int)numBits {
+  bitPos += numBits;
+}
+
+@end
diff --git a/NexusVideoPlayer/DCAdaptiveJitterBuffer.h b/NexusVideoPlayer/DCAdaptiveJitterBuffer.h
new file mode 100644
index 0000000..927f7c3
--- /dev/null
+++ b/NexusVideoPlayer/DCAdaptiveJitterBuffer.h
@@ -0,0 +1,44 @@
+//
+//  DCAdaptiveJitterBuffer.h
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/18/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+@interface Packet : NSObject
+@property (strong, nonatomic) NSData *data;
+@property (assign, nonatomic) int64_t sequenceNumber;
+@property (assign, nonatomic) int64_t PTS;
+@property (assign, nonatomic) int64_t timeAdded;
+@end
+
+@protocol JitterBufferPlayerDelegate;
+
+/// DCAdaptiveJitterBuffer is a fully synchronized wrapper around AdaptiveJitterBuffer
+@interface DCAdaptiveJitterBuffer : NSObject
+
+- (id)initWithTimebase:(int)timebase isLive:(BOOL)isLive isWifi:(BOOL)isWifi;
+
+- (void)setAutoAdjust:(BOOL)adjust;
+- (int64_t)targetDelay;
+- (void)setTargetDelay:(int64_t)delay;
+- (void)addBias:(int64_t)bias;
+- (int)numPackets;
+- (int64_t)bufferLength;
+- (BOOL)fullyBuffered;
+
+- (void)addPacket:(Packet*)packet;
+- (void)addSlaveTiming:(int64_t)PTS;
+- (int64_t)peekPTS;
+- (Packet *)nextPacket;
+
+- (BOOL)waitForPacket;
+- (void)cancelWaiters;
+
+/// @return how quickly/slowly this packet should be played
+- (float)playSpeedForPTS:(int64_t)PTS;
+
+@end
\ No newline at end of file
diff --git a/NexusVideoPlayer/DCAdaptiveJitterBuffer.mm b/NexusVideoPlayer/DCAdaptiveJitterBuffer.mm
new file mode 100644
index 0000000..710002e
--- /dev/null
+++ b/NexusVideoPlayer/DCAdaptiveJitterBuffer.mm
@@ -0,0 +1,164 @@
+//
+//  DCAdaptiveJitterBuffer.mm
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/18/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "DCAdaptiveJitterBuffer.h"
+#import "NSCondition+UnlockAfter.h"
+
+#include "AdaptiveJitterBuffer.hpp"
+
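+// NSCondition+UnlockAfter is not part of this hunk; from its use below it is
+// presumably a small category that brackets a block with lock/unlock, roughly:
+//
+//   @implementation NSCondition (UnlockAfter)
+//   - (void)unlockAfter:(void (^)(void))block {
+//     [self lock];
+//     block();
+//     [self unlock];
+//   }
+//   @end
+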
+@implementation Packet
+@end
+
+@interface DCAdaptiveJitterBuffer ()
+@property (nonatomic, assign) AdaptiveJitterBuffer *buffer;
+@property (nonatomic, strong) NSCondition *packetCondition;
+@end
+
+@implementation DCAdaptiveJitterBuffer
+
+- (id)initWithTimebase:(int)timebase isLive:(BOOL)isLive isWifi:(BOOL)isWifi {
+  if ((self = [super init])) {
+    _buffer = new AdaptiveJitterBuffer(timebase, isLive == YES, isWifi == YES);
+    
+    _packetCondition = [[NSCondition alloc] init];
+  }
+  
+  return self;
+}
+
+- (void)dealloc {
+  delete _buffer;
+}
+
+- (void)setAutoAdjust:(BOOL)adjust {
+  [self.packetCondition unlockAfter:^{
+    _buffer->setAutoAdjust(adjust);
+  }];
+}
+
+- (int64_t)targetDelay {
+  __block int64_t rval;
+  [self.packetCondition unlockAfter:^{
+    rval = _buffer->getTargetDelay();
+  }];
+  return rval;
+}
+
+- (void)setTargetDelay:(int64_t)delay {
+  [self.packetCondition unlockAfter:^{
+    _buffer->setTargetDelay(delay);
+  }];
+}
+
+- (void)addBias:(int64_t)bias {
+  [self.packetCondition unlockAfter:^{
+    _buffer->addBias(bias);
+  }];
+}
+
+- (int)numPackets {
+  __block int rval;
+  [self.packetCondition unlockAfter:^{
+    rval = _buffer->getNumPackets();
+  }];
+  return rval;
+}
+
+- (int64_t)bufferLength {
+  __block int64_t rval;
+  [self.packetCondition unlockAfter:^{
+    rval = _buffer->getBufferLength();
+  }];
+  return rval;
+}
+
+- (BOOL)fullyBuffered {
+  __block BOOL rval;
+  [self.packetCondition unlockAfter:^{
+    rval = _buffer->getFullyBuffered();
+  }];
+  return rval;
+}
+
+- (void)addPacket:(Packet*)packet {
+  [self.packetCondition unlockAfter:^{
+    JBPacket in_pkt = {
+      .seq_num = packet.sequenceNumber,
+      .PTS = packet.PTS,
+      .span = 0,
+      // transfer ownership of packet to JB
+      .payload_data = (__bridge_retained void*)packet
+    };
+    _buffer->addPacket(in_pkt);
+    
+    [self.packetCondition signal];
+  }];
+}
+
+- (void)addSlaveTiming:(int64_t)PTS {
+  [self.packetCondition unlockAfter:^{
+    _buffer->addSlaveTiming(PTS);
+  }];
+}
+
+- (int64_t)peekPTS {
+  __block int64_t rval;
+  [self.packetCondition unlockAfter:^{
+    rval = _buffer->peekPTS();
+  }];
+  return rval;
+}
+
+- (Packet*)nextPacket {
+  __block Packet *packet = nil;
+  [self.packetCondition unlockAfter:^{
+    JBPacket out_pkt;
+    bool has_pkt = _buffer->nextPacket(out_pkt);
+    if (!has_pkt) {
+      return;
+    }
+    
+    // Take ownership of packet back from JB
+    packet = (__bridge_transfer Packet*)out_pkt.payload_data;
+  }];
+  return packet;
+}
+
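+// Blocks until a packet is available or the calling thread is cancelled.
+// NSCondition waits can wake spuriously, so the predicate is re-checked on
+// every pass; it is also checked before the first wait so a packet queued
+// earlier is not missed.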
+- (BOOL)waitForPacket {
+  __block BOOL rval = NO;
+  [self.packetCondition unlockAfter:^{
+    NSThread *thread = [NSThread currentThread];
+    while (![thread isCancelled]) {
+      if (_buffer->getNumPackets() > 0) {
+        rval = YES;
+        return;
+      }
+      [self.packetCondition wait];
+    }
+  }];
+  return rval;
+}
+
+- (void)cancelWaiters {
+  [self.packetCondition unlockAfter:^{
+    [self.packetCondition broadcast];
+  }];
+}
+
+- (float)playSpeedForPTS:(int64_t)PTS {
+  __block float rval;
+  [self.packetCondition unlockAfter:^{
+    rval = _buffer->getPlaySpeedForPTS(PTS);
+  }];
+  return rval;
+}
+
+@end
diff --git a/NexusVideoPlayer/DCOpenGLRenderDispatchQueue.h b/NexusVideoPlayer/DCOpenGLRenderDispatchQueue.h
new file mode 100644
index 0000000..2605e1a
--- /dev/null
+++ b/NexusVideoPlayer/DCOpenGLRenderDispatchQueue.h
@@ -0,0 +1,17 @@
+//
+//  DCOpenGLRenderDispatchQueue.h
+//  NexusVideoPlayer
+//
+//  Created by Mike Montalbo on 4/15/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+//  Based on: https://github.com/BradLarson/GPUImage
+
+void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
+
+@interface DCOpenGLRenderDispatchQueue : NSObject
++ (DCOpenGLRenderDispatchQueue *)sharedRenderQueue;
++ (void *)contextKey;
+
+@property(readonly, nonatomic) dispatch_queue_t renderQueue;
+@end
diff --git a/NexusVideoPlayer/DCOpenGLRenderDispatchQueue.m b/NexusVideoPlayer/DCOpenGLRenderDispatchQueue.m
new file mode 100644
index 0000000..526e80c
--- /dev/null
+++ b/NexusVideoPlayer/DCOpenGLRenderDispatchQueue.m
@@ -0,0 +1,62 @@
+//
+//  DCOpenGLRenderDispatchQueue.m
+//  NexusVideoPlayer
+//
+//  Created by Mike Montalbo on 4/15/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+//  Based on: https://github.com/BradLarson/GPUImage
+
+#import "DCOpenGLRenderDispatchQueue.h"
+
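+// Runs the block on the shared render queue. dispatch_get_specific is used to
+// detect when the caller is already on that queue, in which case the block is
+// run inline; a plain dispatch_sync onto the current queue would deadlock.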
+void runSynchronouslyOnVideoProcessingQueue(void (^block)(void))
+{
+  dispatch_queue_t videoProcessingQueue =
+      [DCOpenGLRenderDispatchQueue sharedRenderQueue].renderQueue;
+  
+  if (dispatch_get_specific([DCOpenGLRenderDispatchQueue contextKey])) {
+    block();
+  } else {
+    dispatch_sync(videoProcessingQueue, block);
+  }
+}
+
+@implementation DCOpenGLRenderDispatchQueue
+
+static void *openGLRenderQueueKey;
+
++ (DCOpenGLRenderDispatchQueue *)sharedRenderQueue
+{
+  static dispatch_once_t pred;
+  static DCOpenGLRenderDispatchQueue *sharedOpenGLRenderDispatchQueue = nil;
+  
+  dispatch_once(&pred, ^{
+    sharedOpenGLRenderDispatchQueue = [[[self class] alloc] init];
+  });
+  return sharedOpenGLRenderDispatchQueue;
+}
+
++ (void *)contextKey {
+  return openGLRenderQueueKey;
+}
+
+- (id)init
+{
+  if (!(self = [super init]))
+  {
+    return nil;
+  }
+  
+  openGLRenderQueueKey = &openGLRenderQueueKey;
+  _renderQueue = dispatch_queue_create("com.dropcam.dropcamios.renderQueue",
+                                       NULL);
+  dispatch_queue_set_specific(_renderQueue,
+                              openGLRenderQueueKey,
+                              (__bridge void *)self,
+                              NULL);
+  
+  return self;
+}
+@end
diff --git a/NexusVideoPlayer/DCTimestampSync.h b/NexusVideoPlayer/DCTimestampSync.h
new file mode 100644
index 0000000..e414ebc
--- /dev/null
+++ b/NexusVideoPlayer/DCTimestampSync.h
@@ -0,0 +1,17 @@
+//
+//  DCTimestampSync.h
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/18/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+@interface DCTimestampSync : NSObject
+
+- (id)initWithMasterTimebase:(int)mtb slaveTimebase:(int)stb;
+- (int64_t)translateFrom:(int64_t)masterTS;
+- (int64_t)translateToMasterFrom:(int64_t)slaveTS;
+
+@end
diff --git a/NexusVideoPlayer/DCTimestampSync.m b/NexusVideoPlayer/DCTimestampSync.m
new file mode 100644
index 0000000..49ed561
--- /dev/null
+++ b/NexusVideoPlayer/DCTimestampSync.m
@@ -0,0 +1,42 @@
+//
+//  DCTimestampSync.m
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/18/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "DCTimestampSync.h"
+
+@interface DCTimestampSync () {
+  int _masterTimebase;
+  int64_t _masterLastTimestamp;
+  int _slaveTimebase;
+  int64_t _slaveLastTimestamp;
+}
+@end
+
+@implementation DCTimestampSync
+
+- (id)initWithMasterTimebase:(int)mtb slaveTimebase:(int)stb {
+  if ((self = [super init]) != nil) {
+    _masterTimebase = mtb;
+    _masterLastTimestamp = 0;
+    _slaveTimebase = stb;
+    _slaveLastTimestamp = 0;
+  }
+  
+  return self;
+}
+
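+// Converts a timestamp in master timebase units to slave timebase units by
+// going through seconds: e.g. with a master timebase of 90000 and a slave
+// timebase of 1000, a master delta of 90000 ticks (1 s) maps to 1000 ticks.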
+- (int64_t)translateFrom:(int64_t)masterTS {
+  double masterSecs = (double)(masterTS - _masterLastTimestamp) / _masterTimebase;
+  return (masterSecs * _slaveTimebase) + _slaveLastTimestamp;
+}
+
+- (int64_t)translateToMasterFrom:(int64_t)slaveTS {
+  double slaveSecs = (double)(slaveTS - _slaveLastTimestamp) / _slaveTimebase;
+  return (slaveSecs * _masterTimebase) + _masterLastTimestamp;
+}
+
+@end
diff --git a/NexusVideoPlayer/GLRenderView.h b/NexusVideoPlayer/GLRenderView.h
new file mode 100644
index 0000000..c2498a8
--- /dev/null
+++ b/NexusVideoPlayer/GLRenderView.h
@@ -0,0 +1,28 @@
+//
+//  GLRenderView.h
+//  NexusVideoPlayer
+//
+//  Created by Dropcam Build on 10/13/12.
+//  Copyright 2012 Dropcam. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+#import <OpenGLES/ES1/gl.h>
+#import <OpenGLES/ES1/glext.h>
+#import <OpenGLES/ES2/gl.h>
+#import <OpenGLES/ES2/glext.h>
+
+// This class wraps the CAEAGLLayer from CoreAnimation into a convenient UIView subclass.
+// The view content is basically an EAGL surface you render your OpenGL scene into.
+// Note that setting the view non-opaque will only work if the EAGL surface has an alpha channel.
+@interface GLRenderView : UIView
+
+@property (nonatomic, retain) EAGLContext *context;
+@property (atomic, readonly) BOOL active;
+
+- (void)setFramebuffer;
+- (BOOL)presentFramebuffer;
+- (UIImage*)snapshot;
+
+@end
diff --git a/NexusVideoPlayer/GLRenderView.m b/NexusVideoPlayer/GLRenderView.m
new file mode 100644
index 0000000..a7025ec
--- /dev/null
+++ b/NexusVideoPlayer/GLRenderView.m
@@ -0,0 +1,283 @@
+//
+//  GLRenderView.m
+//  NexusVideoPlayer
+//
+//  Created by Dropcam Build on 10/13/12.
+//  Copyright 2012 Dropcam. All rights reserved.
+//
+
+#import <QuartzCore/QuartzCore.h>
+#import "DCOpenGLRenderDispatchQueue.h"
+#import "NLCommonLoggingNVP.h"
+
+#import "GLRenderView.h"
+
+@interface GLRenderView () {
+  EAGLContext *context;
+  
+  // The pixel dimensions of the CAEAGLLayer.
+  GLint framebufferWidth;
+  GLint framebufferHeight;
+  
+  // The OpenGL ES names for the framebuffer and renderbuffer used to render to this view.
+  GLuint defaultFramebuffer, colorRenderbuffer;
+}
+- (void)createFramebuffer;
+- (void)deleteFramebuffer;
+@end
+
+@implementation GLRenderView
+
+@dynamic context;
+
+// You must implement this method
++ (Class)layerClass
+{
+  return [CAEAGLLayer class];
+}
+
+- (id)initWithFrame:(CGRect)frame
+{
+  self = [super initWithFrame:frame];
+  if (self)
+  {
+    defaultFramebuffer = 0;
+    colorRenderbuffer = 0;
+    
+    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
+    
+    eaglLayer.opaque = TRUE;
+    eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
+                                    [NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking,
+                                    kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
+                                    nil];
+  }
+  
+  return self;
+}
+
+- (void)dealloc
+{
+  [[NSNotificationCenter defaultCenter] removeObserver:self];
+  [self deleteFramebuffer];
+  [context release];
+  
+  [super dealloc];
+}
+
+- (EAGLContext *)context
+{
+  return context;
+}
+
+- (void)setContext:(EAGLContext *)newContext
+{
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (context != newContext)
+    {
+      [self deleteFramebuffer];
+      
+      [context release];
+      context = [newContext retain];
+      
+      [EAGLContext setCurrentContext:nil];
+    }
+  });
+}
+
+- (void)createFramebuffer
+{
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (context && !defaultFramebuffer)
+    {
+      [EAGLContext setCurrentContext:context];
+      
+      // Create default framebuffer object.
+      glGenFramebuffers(1, &defaultFramebuffer);
+      glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
+      
+      // Create color render buffer and allocate backing store.
+      glGenRenderbuffers(1, &colorRenderbuffer);
+      glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
+      [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
+      glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
+      glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
+      
+      glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
+      
+      if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
+        NLLogNVPWarn(@"Failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
+    }
+  });
+}
+
+- (void)deleteFramebuffer
+{
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (context)
+    {
+      [EAGLContext setCurrentContext:context];
+      
+      if (defaultFramebuffer)
+      {
+        glDeleteFramebuffers(1, &defaultFramebuffer);
+        defaultFramebuffer = 0;
+      }
+      
+      if (colorRenderbuffer)
+      {
+        glDeleteRenderbuffers(1, &colorRenderbuffer);
+        colorRenderbuffer = 0;
+      }
+    }
+  });
+}
+
+- (void)setFramebuffer
+{
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (context)
+    {
+      [EAGLContext setCurrentContext:context];
+      
+      if (!defaultFramebuffer)
+        [self createFramebuffer];
+      
+      glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
+      
+      glViewport(0, 0, framebufferWidth, framebufferHeight);
+    }
+  });
+}
+
+- (BOOL)presentFramebuffer
+{
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return NO;
+  }
+  
+  __block BOOL success = NO;
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (context)
+    {
+      [EAGLContext setCurrentContext:context];
+      
+      glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
+      
+      success = [context presentRenderbuffer:GL_RENDERBUFFER];
+    }
+  });
+  
+  return success;
+}
+
+// IMPORTANT: Call this method after you draw and before -presentRenderbuffer:.
+- (UIImage*)snapshot
+{
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return nil;
+  }
+  
+  __block UIImage *image = nil;
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    GLint backingWidth, backingHeight;
+    
+    // Bind the color renderbuffer used to render the OpenGL ES view.
+    // If the application only creates a single color renderbuffer which is already bound at this point,
+    // this call is redundant, but it is needed when dealing with multiple renderbuffers.
+    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
+    
+    // Get the size of the backing CAEAGLLayer
+    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
+    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
+    
+    NSInteger x = 0, y = 0, width = backingWidth, height = backingHeight;
+    NSInteger dataLength = width * height * 4;
+    GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
+    
+    // Read pixel data from the framebuffer
+    glPixelStorei(GL_PACK_ALIGNMENT, 4);
+    glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
+    
+    // Create a CGImage with the pixel data
+    // If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
+    // otherwise, use kCGImageAlphaPremultipliedLast
+    CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
+    CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
+    CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaNoneSkipLast,
+                                    ref, NULL, true, kCGRenderingIntentDefault);
+    
+    // OpenGL ES measures data in PIXELS
+    // Create a graphics context with the target size measured in POINTS
+    NSInteger widthInPoints, heightInPoints;
+    if (NULL != UIGraphicsBeginImageContextWithOptions) {
+      // On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
+      // Set the scale parameter to your OpenGL ES view's contentScaleFactor
+      // so that you get a high-resolution snapshot when its value is greater than 1.0
+      CGFloat scale = self.contentScaleFactor;
+      widthInPoints = width / scale;
+      heightInPoints = height / scale;
+      UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
+    }
+    else {
+      // On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
+      widthInPoints = width;
+      heightInPoints = height;
+      UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
+    }
+    
+    CGContextRef cgcontext = UIGraphicsGetCurrentContext();
+    
+    // UIKit coordinate system is upside down to GL/Quartz coordinate system
+    // Flip the CGImage by rendering it to the flipped bitmap context
+    // The size of the destination area is measured in POINTS
+    CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
+    CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
+    
+    // Retrieve the UIImage from the current context
+    image = UIGraphicsGetImageFromCurrentImageContext();
+    
+    UIGraphicsEndImageContext();
+    
+    // Clean up
+    free(data);
+    CFRelease(ref);
+    CFRelease(colorspace);
+    CGImageRelease(iref);
+  });
+  
+  return image;
+}
+
+- (void)layoutSubviews
+{
+  [self deleteFramebuffer];
+  [self createFramebuffer];
+}
+
+@end
diff --git a/NexusVideoPlayer/GLRenderViewController.h b/NexusVideoPlayer/GLRenderViewController.h
new file mode 100644
index 0000000..88c4c13
--- /dev/null
+++ b/NexusVideoPlayer/GLRenderViewController.h
@@ -0,0 +1,18 @@
+//
+//  GLRenderViewController.h
+//  NexusVideoPlayer
+//
+//  Created by Mike Montalbo on 6/1/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "DecodedFrame.h"
+
+@interface GLRenderViewController : UIViewController
+
+- (void)presentFrame:(DecodedFrame *)decodedFrame;
+- (UIImage *)snapshot;
+- (void)refreshView;
+- (void)clearView;
+
+@end
diff --git a/NexusVideoPlayer/GLRenderViewController.m b/NexusVideoPlayer/GLRenderViewController.m
new file mode 100644
index 0000000..9be2516
--- /dev/null
+++ b/NexusVideoPlayer/GLRenderViewController.m
@@ -0,0 +1,680 @@
+//
+//  GLRenderViewController.m
+//  NexusVideoPlayer
+//
+//  Created by Mike Montalbo on 6/1/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "ImageUtil.h"
+#import "GLRenderViewController.h"
+#import "DebugLog.h"
+#import "GLRenderView.h"
+#import "NLCommonLoggingNVP.h"
+
+#import "DCOpenGLRenderDispatchQueue.h"
+
+// Uniform index.
+enum
+{
+  UNIFORM_YUV420_Y,
+  UNIFORM_YUV420_U,
+  UNIFORM_YUV420_V,
+  UNIFORM_NV12_Y,
+  UNIFORM_NV12_UV,
+  UNIFORM_NV12_CONVERSION_MATRIX,
+  NUM_UNIFORMS
+};
+GLint uniforms[NUM_UNIFORMS];
+
+// Attribute index.
+enum
+{
+  ATTRIB_VERTEX,
+  ATTRIB_TEXCOORD,
+  NUM_ATTRIBUTES
+};
+
+// BT.601, which is the standard for SDTV.
+static const GLfloat kColorConversion601[] = {
+  1.164,  1.164, 1.164,
+  0.0, -0.392, 2.017,
+  1.596, -0.813,   0.0,
+};
+
+// BT.709, which is the standard for HDTV.
+static const GLfloat kColorConversion709[] = {
+  1.164,  1.164, 1.164,
+  0.0, -0.213, 2.112,
+  1.793, -0.533,   0.0,
+};
+
+
+@interface GLRenderViewController () {
+  GLuint _yuv420Program;
+  GLuint _yuv420yTexture, _yuv420uTexture, _yuv420vTexture;
+  
+  GLuint _nv12Program;
+  CVOpenGLESTextureRef _nv12yTexture;
+  CVOpenGLESTextureRef _nv12uvTexture;
+  CVOpenGLESTextureCacheRef _videoTextureCache;
+
+  size_t _textureWidth;
+  size_t _textureHeight;
+}
+
+- (void)setupBuffers;
+@property (strong, atomic) EAGLContext *context;
+@property (atomic) CGRect drawRect;
+@end
+
+@implementation GLRenderViewController
+@synthesize context = _context;
+
+- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil {
+  if((self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil])) {
+    _textureHeight = 0;
+    _textureWidth = 0;
+  }
+  
+  return self;
+}
+
+- (void)dealloc {
+  [[NSNotificationCenter defaultCenter] removeObserver:self];
+  
+  [self tearDownGL];
+}
+
+- (void)loadView {
+  GLRenderView *view = [[GLRenderView alloc] initWithFrame:CGRectMake(0, 0, 0, 0)];
+  self.view = view;
+}
+
+- (void)viewDidLoad {
+  [super viewDidLoad];
+  
+  [[NSNotificationCenter defaultCenter] addObserver:self
+                                           selector:@selector(applicationWillResignActive:)
+                                               name:UIApplicationWillResignActiveNotification
+                                             object:nil];
+  
+  [[NSNotificationCenter defaultCenter] addObserver:self
+                                           selector:@selector(applicationWillEnterForeground:)
+                                               name:UIApplicationWillEnterForegroundNotification
+                                             object:nil];
+  
+  [self setupGL];
+}
+
+- (void)applicationWillResignActive:(NSNotification *)notification {
+  [self tearDownGL];
+}
+
+- (void)applicationWillEnterForeground:(NSNotification *)notification {
+  [self setupGL];
+}
+
+- (void)setupGL {
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+    
+    if (!self.context) {
+      NLLogNVPWarn(@"Failed to create ES context");
+    }
+    
+    GLRenderView *view = (GLRenderView *)self.view;
+    view.context = self.context;
+    
+    [EAGLContext setCurrentContext:self.context];
+    
+    [self loadShaders];
+    
+    glUseProgram(_yuv420Program);
+    [self checkError];
+    
+    glUniform1i(uniforms[UNIFORM_YUV420_Y], 0);
+    glUniform1i(uniforms[UNIFORM_YUV420_U], 1);
+    glUniform1i(uniforms[UNIFORM_YUV420_V], 2);
+    [self checkError];
+    
+    glActiveTexture(GL_TEXTURE0);
+    glGenTextures(1, &_yuv420yTexture);
+    glBindTexture(GL_TEXTURE_2D, _yuv420yTexture);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    
+    glActiveTexture(GL_TEXTURE1);
+    glGenTextures(1, &_yuv420uTexture);
+    glBindTexture(GL_TEXTURE_2D, _yuv420uTexture);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    
+    glActiveTexture(GL_TEXTURE2);
+    glGenTextures(1, &_yuv420vTexture);
+    glBindTexture(GL_TEXTURE_2D, _yuv420vTexture);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri (GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    
+    // Create CVOpenGLESTextureCacheRef for optimal CVPixelBufferRef to GLES texture conversion.
+    if (!_videoTextureCache) {
+      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
+      if (err != noErr) {
+        NLLogNVPError(@"Error at CVOpenGLESTextureCacheCreate %d", err);
+        return;
+      }
+    }
+
+    [EAGLContext setCurrentContext:nil];
+  });
+}
+
+- (void)tearDownGL {
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (self.context) {
+      [EAGLContext setCurrentContext:self.context];
+      
+      if (_yuv420Program) {
+        glDeleteProgram(_yuv420Program);
+        _yuv420Program = 0;
+      }
+      
+      if (_yuv420yTexture) {
+        glDeleteTextures(1, &_yuv420yTexture);
+        _yuv420yTexture = 0;
+      }
+      
+      if (_yuv420uTexture) {
+        glDeleteTextures(1, &_yuv420uTexture);
+        _yuv420uTexture = 0;
+      }
+      
+      if (_yuv420vTexture) {
+        glDeleteTextures(1, &_yuv420vTexture);
+        _yuv420vTexture = 0;
+      }
+      
+      [EAGLContext setCurrentContext:nil];
+      self.context = nil;
+      
+      GLRenderView *view = (GLRenderView *)self.view;
+      view.context = nil;
+      
+      glFinish();
+    }
+  });
+}
+
+- (void)setupBuffers {
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    [EAGLContext setCurrentContext:self.context];
+    
+    const GLfloat squareVertices[] = {
+      1.0f,  1.0f,
+      -1.0f, 1.0f,
+      1.0f,   -1.0f,
+      -1.0f,  -1.0f,
+    };
+    
+    const GLfloat squareTex[] = {
+      1.0, 0.0,
+      0.0, 0.0,
+      1.0, 1.0,
+      0.0, 1.0,
+    };
+    
+    glClearColor(0.97f, 0.97f, 0.97f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+    
+    // update attribute values
+    glEnableVertexAttribArray(ATTRIB_VERTEX);
+    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
+    
+    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
+    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2*sizeof(GLfloat), squareTex);
+  });
+}
+
+- (void)updateTexturesFromMemBuffers:(DecodedFrame *)decodedFrame {
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    int width = decodedFrame.width;
+    int height = decodedFrame.height;
+    
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+    
+    glUseProgram(_yuv420Program);
+    [self checkError];
+    
+    glUniform1i(uniforms[UNIFORM_YUV420_Y], 0);
+    glUniform1i(uniforms[UNIFORM_YUV420_U], 1);
+    glUniform1i(uniforms[UNIFORM_YUV420_V], 2);
+    [self checkError];
+    
+    if (width != _textureWidth || height != _textureHeight)
+    {
+      _textureWidth = width;
+      _textureHeight = height;
+      
+      [self setupBuffers];
+      
+      // Create new textures with the right size
+      glActiveTexture(GL_TEXTURE0);
+      glBindTexture(GL_TEXTURE_2D, _yuv420yTexture);
+      glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _textureWidth, _textureHeight, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, [decodedFrame.yPlane bytes]);
+      
+      glActiveTexture(GL_TEXTURE1);
+      glBindTexture(GL_TEXTURE_2D, _yuv420uTexture);
+      glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _textureWidth/2, _textureHeight/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, [decodedFrame.uPlane bytes]);
+      
+      glActiveTexture(GL_TEXTURE2);
+      glBindTexture(GL_TEXTURE_2D, _yuv420vTexture);
+      glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _textureWidth/2, _textureHeight/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, [decodedFrame.vPlane bytes]);
+    } else {
+      // Update existing textures as size has not changed
+      glActiveTexture(GL_TEXTURE0);
+      glBindTexture(GL_TEXTURE_2D, _yuv420yTexture);
+      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _textureWidth, _textureHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, [decodedFrame.yPlane bytes]);
+      
+      glActiveTexture(GL_TEXTURE1);
+      glBindTexture(GL_TEXTURE_2D, _yuv420uTexture);
+      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _textureWidth/2, _textureHeight/2, GL_LUMINANCE, GL_UNSIGNED_BYTE, [decodedFrame.uPlane bytes]);
+      
+      glActiveTexture(GL_TEXTURE2);
+      glBindTexture(GL_TEXTURE_2D, _yuv420vTexture);
+      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _textureWidth/2, _textureHeight/2, GL_LUMINANCE, GL_UNSIGNED_BYTE, [decodedFrame.vPlane bytes]);
+    }
+  });
+}
+
+- (void)cleanupTextureCache {
+  if (_nv12yTexture) {
+    CFRelease(_nv12yTexture);
+    _nv12yTexture = NULL;
+  }
+  
+  if (_nv12uvTexture) {
+    CFRelease(_nv12uvTexture);
+    _nv12uvTexture = NULL;
+  }
+  
+  // Periodic texture cache flush every frame
+  CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
+}
+
+- (void)updateTexturesFromImageBuffer:(CVImageBufferRef)imageBuffer {
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    CVReturn err;
+    
+    /*
+     Use the color attachment of the pixel buffer to determine the appropriate color conversion matrix.
+     */
+    CFTypeRef colorAttachments = CVBufferGetAttachment(imageBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
+    
+    const GLfloat *conversionMatrix;
+    if (colorAttachments == kCVImageBufferYCbCrMatrix_ITU_R_601_4) {
+      conversionMatrix = kColorConversion601;
+    }
+    else {
+      conversionMatrix = kColorConversion709;
+    }
+    
+    glUseProgram(_nv12Program);
+    [self checkError];
+    
+    glUniform1i(uniforms[UNIFORM_NV12_Y], 0);
+    glUniform1i(uniforms[UNIFORM_NV12_UV], 1);
+    glUniformMatrix3fv(uniforms[UNIFORM_NV12_CONVERSION_MATRIX], 1, GL_FALSE, conversionMatrix);
+    [self checkError];
+    
+    int width = (int)CVPixelBufferGetWidth(imageBuffer);
+    int height = (int)CVPixelBufferGetHeight(imageBuffer);
+
+    if (width != _textureWidth || height != _textureHeight)
+    {
+      _textureWidth = width;
+      _textureHeight = height;
+      
+      [self setupBuffers];
+    }
+    
+    [self cleanupTextureCache];
+    
+    // CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture
+    // optimally from CVImageBufferRef.
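+    // For NV12 pixel buffers, plane 0 is full-resolution luma (sampled as
+    // GL_RED_EXT) and plane 1 is half-resolution interleaved CbCr (GL_RG_EXT).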
+    
+    // Y-plane
+    glActiveTexture(GL_TEXTURE0);
+    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+                                                       _videoTextureCache,
+                                                       imageBuffer,
+                                                       NULL,
+                                                       GL_TEXTURE_2D,
+                                                       GL_RED_EXT,
+                                                       _textureWidth,
+                                                       _textureHeight,
+                                                       GL_RED_EXT,
+                                                       GL_UNSIGNED_BYTE,
+                                                       0,
+                                                       &_nv12yTexture);
+    if (err) {
+      NLLogNVPError(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
+    }
+    
+    glBindTexture(CVOpenGLESTextureGetTarget(_nv12yTexture), CVOpenGLESTextureGetName(_nv12yTexture));
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    
+    // UV-plane
+    glActiveTexture(GL_TEXTURE1);
+    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
+                                                       _videoTextureCache,
+                                                       imageBuffer,
+                                                       NULL,
+                                                       GL_TEXTURE_2D,
+                                                       GL_RG_EXT,
+                                                       _textureWidth/2,
+                                                       _textureHeight/2,
+                                                       GL_RG_EXT,
+                                                       GL_UNSIGNED_BYTE,
+                                                       1,
+                                                       &_nv12uvTexture);
+    if (err) {
+      NLLogNVPError(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
+    }
+    
+    glBindTexture(CVOpenGLESTextureGetTarget(_nv12uvTexture), CVOpenGLESTextureGetName(_nv12uvTexture));
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    
+  });
+}
+
+- (void)render {
+  if ([UIApplication sharedApplication].applicationState == UIApplicationStateBackground) {
+    NLLogNVPWarn(@"%@ called while app in background. Ignoring.",NSStringFromSelector(_cmd));
+    return;
+  }
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+    
+    if (_textureWidth > 0 && _textureHeight > 0) {
+      int framebufferWidth;
+      int framebufferHeight;
+      glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &framebufferWidth);
+      glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &framebufferHeight);
+      
+      CGRect videoRect = CGRectMake(0, 0, framebufferWidth, framebufferHeight);
+      
+      switch (self.view.contentMode) {
+        case UIViewContentModeScaleAspectFit:
+          // Calculate a centered rect with the correct aspect ratio and set the GL viewport
+          videoRect = [ImageUtil calcCenterFittedRectWithBitmapWidth:_textureWidth
+                                                        bitmapHeight:_textureHeight
+                                                         canvasWidth:framebufferWidth
+                                                        canvasHeight:framebufferHeight];
+          break;
+          
+        case UIViewContentModeScaleToFill:
+          break;
+          
+        default:
+          NLLogNVPWarn(@"Warning: unsupported contentMode in GLRenderViewController");
+          break;
+      }
+      
+      glViewport(videoRect.origin.x, videoRect.origin.y, videoRect.size.width, videoRect.size.height);
+      
+      glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+    }
+  });
+}
+
+- (void)presentFrame:(DecodedFrame *)decodedFrame {
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (self.context) {
+      [EAGLContext setCurrentContext:self.context];
+      
+      if (decodedFrame.imageBuffer != NULL) {
+        [self updateTexturesFromImageBuffer:decodedFrame.imageBuffer];
+      }
+      else {
+        [self updateTexturesFromMemBuffers:decodedFrame];
+      }
+    
+      GLRenderView *view = (GLRenderView*)self.view;
+      [view setFramebuffer];
+      [self render];
+      [view presentFramebuffer];
+      
+      [EAGLContext setCurrentContext:nil];
+    }
+  });
+}
+
+- (UIImage *)snapshot {
+  __block UIImage *s;
+  
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (self.context) {
+      GLRenderView *view = (GLRenderView*)self.view;
+      
+      [EAGLContext setCurrentContext:self.context];
+      [view setFramebuffer];
+      [self render];
+      s = [view snapshot];
+      [EAGLContext setCurrentContext:nil];
+    }
+  });
+  
+  return s;
+}
+
+- (void)refreshView {
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (self.context) {
+      [EAGLContext setCurrentContext:self.context];
+      GLRenderView *view = (GLRenderView*)self.view;
+      [view setFramebuffer];
+      [self render];
+      [view presentFramebuffer];
+      [EAGLContext setCurrentContext:nil];
+    }
+  });
+}
+
+- (void)clearView {
+  runSynchronouslyOnVideoProcessingQueue(^{
+    if (self.context) {
+      [EAGLContext setCurrentContext:self.context];
+      GLRenderView *view = (GLRenderView*)self.view;
+      [view setFramebuffer];
+      glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+      [view presentFramebuffer];
+      [EAGLContext setCurrentContext:nil];
+    }
+  });
+}
+
+#pragma mark - OpenGL ES 2 shader compilation
+
+//
+// These methods are already run on the video processing queue in setupGL.
+//
+- (void)checkError {
+  GLenum err;
+  
+  if ((err = glGetError()) != GL_NO_ERROR) {
+    NLLogNVPError(@"OpenGL Error: %d", err);
+  }
+}
+
+- (GLuint)loadShaderWithVertexName:(NSString*)vertexName andFragmentName:(NSString*)fragmentName {
+  GLuint vertShader, fragShader;
+  NSString *vertShaderPathname, *fragShaderPathname;
+  
+  // Create shader program.
+  GLuint program = glCreateProgram();
+  
+  // Create and compile vertex shader.
+  vertShaderPathname = [[NSBundle mainBundle] pathForResource:vertexName ofType:@"vsh"];
+  if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
+    NLLogNVPWarn(@"Failed to compile vertex shader");
+    return 0;
+  }
+  
+  // Create and compile fragment shader.
+  fragShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentName ofType:@"fsh"];
+  if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
+    NLLogNVPWarn(@"Failed to compile fragment shader");
+    return 0;
+  }
+  
+  // Attach vertex shader to program.
+  glAttachShader(program, vertShader);
+  
+  // Attach fragment shader to program.
+  glAttachShader(program, fragShader);
+  
+  // Bind attribute locations.
+  // This needs to be done prior to linking.
+  glBindAttribLocation(program, ATTRIB_VERTEX, "position");
+  glBindAttribLocation(program, ATTRIB_TEXCOORD, "texCoord");
+  
+  // Link program.
+  if (![self linkProgram:program]) {
+    NLLogNVPWarn(@"Failed to link program: %d", program);
+    
+    if (vertShader) {
+      glDeleteShader(vertShader);
+      vertShader = 0;
+    }
+    if (fragShader) {
+      glDeleteShader(fragShader);
+      fragShader = 0;
+    }
+    if (program) {
+      glDeleteProgram(program);
+      program = 0;
+    }
+    
+    return 0;
+  }
+  
+  // Release vertex and fragment shaders.
+  if (vertShader) {
+    glDetachShader(program, vertShader);
+    glDeleteShader(vertShader);
+  }
+  if (fragShader) {
+    glDetachShader(program, fragShader);
+    glDeleteShader(fragShader);
+  }
+  
+  return program;
+}
+
+- (BOOL)loadShaders {
+  _yuv420Program = [self loadShaderWithVertexName:@"Shader" andFragmentName:@"Shader_YUV420"];
+  if (!_yuv420Program) {
+    return NO;
+  }
+  
+  uniforms[UNIFORM_YUV420_Y] = glGetUniformLocation(_yuv420Program, "SamplerY");
+  uniforms[UNIFORM_YUV420_U] = glGetUniformLocation(_yuv420Program, "SamplerU");
+  uniforms[UNIFORM_YUV420_V] = glGetUniformLocation(_yuv420Program, "SamplerV");
+  
+  _nv12Program = [self loadShaderWithVertexName:@"Shader" andFragmentName:@"Shader_NV12"];
+  if (!_nv12Program) {
+    return NO;
+  }
+  
+  uniforms[UNIFORM_NV12_Y] = glGetUniformLocation(_nv12Program, "SamplerY");
+  uniforms[UNIFORM_NV12_UV] = glGetUniformLocation(_nv12Program, "SamplerUV");
+  uniforms[UNIFORM_NV12_CONVERSION_MATRIX] = glGetUniformLocation(_nv12Program, "colorConversionMatrix");
+  
+  return YES;
+}
+
+- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file {
+  GLint status;
+  const GLchar *source;
+  
+  source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
+  if (!source) {
+    NLLogNVPWarn(@"Failed to load shader source: %@", file);
+    return NO;
+  }
+  
+  *shader = glCreateShader(type);
+  glShaderSource(*shader, 1, &source, NULL);
+  glCompileShader(*shader);
+  
+#if defined(DEBUG)
+  GLint logLength;
+  glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
+  if (logLength > 0) {
+    GLchar *log = (GLchar *)malloc(logLength);
+    glGetShaderInfoLog(*shader, logLength, &logLength, log);
+    NLLogNVPWarn(@"Shader compile log:\n%s", log);
+    free(log);
+  }
+#endif
+  
+  glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
+  if (status == 0) {
+    glDeleteShader(*shader);
+    return NO;
+  }
+  
+  return YES;
+}
+
+- (BOOL)linkProgram:(GLuint)prog {
+  GLint status;
+  glLinkProgram(prog);
+  
+#if defined(DEBUG)
+  GLint logLength;
+  glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
+  if (logLength > 0) {
+    GLchar *log = (GLchar *)malloc(logLength);
+    glGetProgramInfoLog(prog, logLength, &logLength, log);
+    NLLogNVPWarn(@"Program link log:\n%s", log);
+    free(log);
+  }
+#endif
+  
+  glGetProgramiv(prog, GL_LINK_STATUS, &status);
+  if (status == 0) {
+    return NO;
+  }
+  
+  return YES;
+}
+@end
diff --git a/NexusVideoPlayer/H264.h b/NexusVideoPlayer/H264.h
new file mode 100644
index 0000000..7e1d84d
--- /dev/null
+++ b/NexusVideoPlayer/H264.h
@@ -0,0 +1,33 @@
+//
+//  H264.h
+//  Dropcam
+//
+//  Created by Loren Kirkby on 12/18/09.
+//  Copyright 2009 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+#define H264_NAL_SLICE_TYPE 1
+#define H264_NAL_IDR_TYPE 5
+#define H264_NAL_SPS_TYPE 7
+#define H264_NAL_PPS_TYPE 8
+
+typedef enum {
+  SLICE_P = 0,
+  SLICE_B,
+  SLICE_I,
+  SLICE_SP,
+  SLICE_SI,
+  SLICE_P_ONLY,
+  SLICE_B_ONLY,
+  SLICE_I_ONLY,
+  SLICE_SP_ONLY,
+  SLICE_SI_ONLY,
+  
+  SLICE_INVALID = -1
+} H264SliceType;
+
+BOOL isStartPacket(NSData *packet);
+uint8_t get_h264_nalu_type(NSData *packet);
+H264SliceType getH264SliceType(NSData *packet);
\ No newline at end of file
diff --git a/NexusVideoPlayer/H264.m b/NexusVideoPlayer/H264.m
new file mode 100644
index 0000000..5181099
--- /dev/null
+++ b/NexusVideoPlayer/H264.m
@@ -0,0 +1,77 @@
+//
+//  H264.m
+//  Dropcam
+//
+//  Created by Loren Kirkby on 12/18/09.
+//  Copyright 2009 Dropcam. All rights reserved.
+//
+
+#import "H264.h"
+
+uint8_t get_h264_nalu_type(NSData *packet)
+{
+  if ([packet length] < 1)
+    return 0xFF;  // not a valid NAL unit type
+  
+  uint8_t header[1] = {0};
+  [packet getBytes:header length:sizeof(header)];
+  
+  uint8_t nal_unit_type = (header[0] & 0x1F);
+  return nal_unit_type;
+}
+
+BOOL isStartPacket(NSData *packet)
+{
+  uint8_t nal_unit_type = get_h264_nalu_type(packet);
+  return nal_unit_type == H264_NAL_IDR_TYPE ||
+         nal_unit_type == H264_NAL_SPS_TYPE ||
+         nal_unit_type == H264_NAL_PPS_TYPE;
+}
+
+
+static BOOL readBit(NSData *data, int bitpos)
+{
+  uint8_t byte = ((uint8_t*)data.bytes)[bitpos / 8];
+  int shift = 7 - (bitpos % 8);
+  uint8_t mask = byte & (1 << shift);
+  return mask >> shift;
+}
+
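+// Decodes one unsigned Exp-Golomb codeword (ue(v), spec section 9.1): count
+// leading zero bits, read that many suffix bits, and return
+// 2^leadingZeroBits - 1 + suffix. For example the bits 00111 decode as
+// leadingZeroBits = 2, suffix = 3, value = 6.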
+static uint32_t readExpGolomb(NSData *data, int *bitpos)
+{
+  // Section 9.1
+  int leadingZeroBits = -1;
+  for (int b = 0; !b; leadingZeroBits++) {
+    b = readBit(data, (*bitpos)++);
+  }
+  
+  uint32_t suffix = 0;
+  for (int i = 1; i <= leadingZeroBits; i++) {
+    suffix |= readBit(data, (*bitpos)++) << (leadingZeroBits - i);
+  }
+  
+  uint32_t pow2 = (1 << leadingZeroBits);
+  return pow2 - 1 + suffix;
+}
+
+H264SliceType getH264SliceType(NSData *packet)
+{
+  if ([packet length] < 2)
+    return SLICE_INVALID;
+  
+  // Section 7.2
+  uint8_t naluType = get_h264_nalu_type(packet);
+  if (naluType != H264_NAL_SLICE_TYPE && naluType != H264_NAL_IDR_TYPE) {
+    return SLICE_INVALID;
+  }
+  
+  // Section 7.3
+  int bitpos = 8;
+  uint32_t first_mb = readExpGolomb(packet, &bitpos); (void)first_mb;
+  uint32_t slice_type = readExpGolomb(packet, &bitpos);
+  return (H264SliceType)slice_type;
+}
\ No newline at end of file
diff --git a/NexusVideoPlayer/HWVideoDecoder.h b/NexusVideoPlayer/HWVideoDecoder.h
new file mode 100644
index 0000000..909cb5c
--- /dev/null
+++ b/NexusVideoPlayer/HWVideoDecoder.h
@@ -0,0 +1,20 @@
+//
+//  HWVideoDecoder.h
+//  NexusVideoPlayer
+//
+//  Created by lorenkirkby on 9/19/14.
+//  Copyright (c) 2014 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "VideoDecoderBase.h"
+#import "Nexustalk.pb.h"
+
+@interface HWVideoDecoder : NSObject<VideoDecoder>
+
+- (id)initWithCodecType:(CodecType)codecType privateData:(PBArray*)privateDataArray timebase:(int)timebase;
++ (BOOL)isSupported;
+
+@property (nonatomic, weak) id<VideoDecoderDelegate> delegate;
+
+@end
diff --git a/NexusVideoPlayer/HWVideoDecoder.m b/NexusVideoPlayer/HWVideoDecoder.m
new file mode 100644
index 0000000..4f9d331
--- /dev/null
+++ b/NexusVideoPlayer/HWVideoDecoder.m
@@ -0,0 +1,292 @@
+//
+//  HWVideoDecoder.m
+//  NexusVideoPlayer
+//
+//  Created by lorenkirkby on 9/19/14.
+//  Copyright (c) 2014 Dropcam. All rights reserved.
+//
+
+#import "HWVideoDecoder.h"
+#import "H264.h"
+#import "NLCommonLoggingNVP.h"
+#import <VideoToolbox/VideoToolbox.h>
+
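+// With B-frames, decode order differs from presentation order, so a few
+// decoded frames are buffered and emitted sorted by PTS.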
+#define NUM_BUFFER_FRAMES_WITH_BFRAMES 3
+
+#pragma mark - HWVideoDecoder interface
+
+@interface HWVideoDecoder() {
+  NSData *_sps;
+  NSData *_pps;
+  
+  NSMutableArray *_decodedFrames;
+  
+  BOOL _hasBFrames;
+  
+  VTDecompressionSessionRef _decompressionSession;
+  CMVideoFormatDescriptionRef _formatDesc;
+  int _timebase;
+}
+
+- (void)didDecompress:(DecodedFrame*)frame;
+
+@end
+
+#pragma mark - VideoToolBox Decompress Frame CallBack
+/*
+ This callback gets called every time the decompression session decodes a frame
+ */
+static void didDecompress(void *decompressionOutputRefCon,
+                          void *sourceFrameRefCon,
+                          OSStatus status,
+                          VTDecodeInfoFlags infoFlags,
+                          CVImageBufferRef imageBuffer,
+                          CMTime presentationTimeStamp,
+                          CMTime presentationDuration ) {
+
+  HWVideoDecoder *decoder = (__bridge HWVideoDecoder*)decompressionOutputRefCon;
+  
+  if (imageBuffer != NULL) {
+    CGSize size = CVImageBufferGetDisplaySize(imageBuffer);
+    
+    DecodedFrame *frame = [DecodedFrame decodedFrameWithWidth:size.width
+                                                       height:size.height
+                                                  imageBuffer:imageBuffer
+                                                    timestamp:presentationTimeStamp.value];
+    
+    [decoder didDecompress:frame];
+  }
+}
+
+#pragma mark - HWVideoDecoder implementation
+
+@implementation HWVideoDecoder
+
++ (BOOL)isSupported {
+  // HW decoder is only supported on iOS 8 or higher.  Need to check for the actual iOS version because
+  // VideoToolbox actually exists as a private API on iOS 7 but it doesn't work.
+  return SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"8");
+}
+
+- (id)initWithCodecType:(CodecType)codecType privateData:(PBArray*)privateDataArray timebase:(int)timebase {
+  self = [super init];
+  if (self) {
+    _timebase = timebase;
+    _decodedFrames = [[NSMutableArray alloc] init];
+    
+    // Run any out of band NAL units through the decoder
+    for (NSData *item in privateDataArray) {
+      [self decodeFrame:item PTS:0];
+    }
+  }
+  
+  return self;
+}
+
+- (void)dealloc {
+  [self destroyDecoder];
+}
+
+
+- (void)didDecompress:(DecodedFrame*)frame {
+  // Add frame to the output buffer and sort by PTS (to reorder any B-frames)
+  [_decodedFrames addObject:frame];
+  [_decodedFrames sortUsingComparator:^NSComparisonResult(id o1, id o2) {
+    int64_t pts1 = ((DecodedFrame*)o1).PTS;
+    int64_t pts2 = ((DecodedFrame*)o2).PTS;
+    
+    if (pts1 > pts2)
+      return NSOrderedDescending;
+    
+    if (pts1 < pts2)
+      return NSOrderedAscending;
+    
+    return NSOrderedSame;
+  }];
+  
+  if (_hasBFrames && _decodedFrames.count < NUM_BUFFER_FRAMES_WITH_BFRAMES) {
+    // We have B-frames, need to buffer more before outputting a frame
+    return;
+  }
+  
+  // Ready to output a frame
+  DecodedFrame *outFrame = _decodedFrames[0];
+  [_decodedFrames removeObjectAtIndex:0];
+  
+  [self.delegate onFrameDecoded:outFrame];
+}
+
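+// VideoToolbox consumes AVCC-style samples: each NAL unit is prefixed with a
+// 4-byte big-endian length instead of an Annex B start code, which is why the
+// block buffer below is 4 bytes larger than the frame data.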
+- (CMSampleBufferRef)sampleBufferForFrameData:(NSData*)frameData withPTS:(uint64_t)PTS {
+  CMBlockBufferRef block;
+  
+  NSInteger blockLen = frameData.length + 4;
+  OSStatus res = CMBlockBufferCreateWithMemoryBlock(
+                                           kCFAllocatorDefault,
+                                           NULL,                 // &memory_block
+                                           blockLen,             // block_length
+                                           kCFAllocatorDefault,  // block_allocator
+                                           NULL,                 // &custom_block_source
+                                           0,                    // offset_to_data
+                                           blockLen,             // data_length
+                                           0,                    // flags
+                                           &block);
+  
+  
+  if (res != noErr) {
+    NLLogNVPError(@"Failed to create block buffer: %d", (int)res);
+    return NULL;
+  }
+  
+  // Copy the data into the buffer with size header
+  uint32_t size = htonl((uint32_t)frameData.length);
+  CMBlockBufferReplaceDataBytes(&size, block, 0, 4);
+  CMBlockBufferReplaceDataBytes(frameData.bytes, block, 4, frameData.length);
+  
+  CMSampleBufferRef sampleBuffer;
+  CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid;
+  timingInfo.presentationTimeStamp.value = PTS;
+  timingInfo.presentationTimeStamp.timescale = _timebase;
+  timingInfo.presentationTimeStamp.flags = kCMTimeFlags_Valid;
+  
+  res = CMSampleBufferCreate(
+                         kCFAllocatorDefault,
+                         block,                // data_buffer
+                         true,                 // data_ready
+                         NULL,                 // make_data_ready_callback
+                         NULL,                 // make_data_ready_refcon
+                         _formatDesc,          // format_description
+                         1,                    // num_samples
+                         1,                    // num_sample_timing_entries
+                         &timingInfo,          // &sample_timing_array
+                         0,                    // num_sample_size_entries
+                         NULL,                 // &sample_size_array
+                         &sampleBuffer
+                         );
+  
+  CFRelease(block);
+  
+  if (res != noErr) {
+    NLLogNVPError(@"Failed to create sample buffer: %d", (int)res);
+    return NULL;
+  }
+  
+  return sampleBuffer;
+}
+
+- (void)destroyDecoder {
+  if (_decompressionSession) {
+    CFRelease(_decompressionSession);
+    _decompressionSession = NULL;
+  }
+  
+  if (_formatDesc) {
+    CFRelease(_formatDesc);
+    _formatDesc = NULL;
+  }
+}
+
+- (void)decodeFrame:(NSData*)frameData PTS:(uint64_t)PTS {
+  OSStatus res;
+  
+  uint8_t naluType = get_h264_nalu_type(frameData);
+
+  // Store the SPS and PPS so that we can initialize the decoder.  Also re-initialize
+  // the decoder if it looks like the SPS or PPS has changed.
+  
+  if (naluType == H264_NAL_SPS_TYPE) {
+    if (_sps && ![_sps isEqualToData:frameData]) {
+      NLLogNVPInfo(@"SPS changed - reinitializing decoder");
+      [self destroyDecoder];
+    }
+    
+    _sps = frameData;
+    return;
+  }
+  
+  if (naluType == H264_NAL_PPS_TYPE) {
+    if (_pps && ![_pps isEqualToData:frameData]) {
+      NLLogNVPInfo(@"PPS changed - reinitializing decoder");
+      [self destroyDecoder];
+    }
+    
+    _pps = frameData;
+    return;
+  }
+  
+  // Check for the presence of B-frames
+  if (naluType == H264_NAL_SLICE_TYPE) {
+    H264SliceType sliceType = getH264SliceType(frameData);
+    if (sliceType == SLICE_B || sliceType == SLICE_B_ONLY) {
+      _hasBFrames = YES;
+    }
+  }
+  
+  // If we have an SPS and PPS, create the decoder
+  if (_decompressionSession == NULL && _sps && _pps) {
+    const uint8_t *parameter_sets[] = { _sps.bytes, _pps.bytes };
+    const size_t sizes[] = { _sps.length, _pps.length };
+    
+    res = CMVideoFormatDescriptionCreateFromH264ParameterSets(
+                                                          NULL,
+                                                          2,
+                                                          parameter_sets,
+                                                          sizes,
+                                                          4,
+                                                          &_formatDesc);
+    if (res != noErr) {
+      NLLogNVPError(@"Failed to create format description: %d", (int)res);
+      return;
+    }
+    
+    CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(_formatDesc);
+    NLLogNVPInfo(@"Creating decoder for video dimensions: %dx%d", dims.width, dims.height);
+    
+    VTDecompressionOutputCallbackRecord callBackRecord;
+    callBackRecord.decompressionOutputCallback = didDecompress;
+    callBackRecord.decompressionOutputRefCon = (__bridge void *)self;
+    
+    // Request NV12 format (YUV biplanar) as this seems to be the preferred and fastest format for iOS devices.
+    NSDictionary *destFormatDict = @{
+      (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
+      (__bridge NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES
+      };
+    
+    res = VTDecompressionSessionCreate(
+                                   kCFAllocatorDefault,
+                                   _formatDesc,
+                                   NULL,
+                                   (__bridge CFDictionaryRef)destFormatDict,
+                                   &callBackRecord,
+                                   &_decompressionSession);
+    
+    if (res != noErr) {
+      NLLogNVPError(@"Failed to create decoder: %d", (int)res);
+      return;
+    }
+  }
+  
+  // Feed the frame to the decoder
+  if (_decompressionSession != NULL) {
+    CMSampleBufferRef sampleBuffer = [self sampleBufferForFrameData:frameData withPTS:PTS];
+    
+    if (sampleBuffer) {
+      VTDecodeFrameFlags flags = 0; //kVTDecodeFrame_EnableAsynchronousDecompression;
+      VTDecodeInfoFlags flagOut;
+      res = VTDecompressionSessionDecodeFrame(
+                                          _decompressionSession,
+                                          sampleBuffer,
+                                          flags,
+                                          NULL,
+                                          &flagOut);
+      
+      CFRelease(sampleBuffer);
+      
+      if (res != noErr) {
+        NLLogNVPError(@"Error decoding frame: %d", (int)res);
+      }
+    }
+  }
+}
+
+@end
+
diff --git a/NexusVideoPlayer/NLCommonLoggingNVP.h b/NexusVideoPlayer/NLCommonLoggingNVP.h
new file mode 100755
index 0000000..83223df
--- /dev/null
+++ b/NexusVideoPlayer/NLCommonLoggingNVP.h
@@ -0,0 +1,9 @@
+#import <Foundation/Foundation.h>
+
+@interface NLCommonLoggingNexusVideoPlayer : NSObject
+
+FOUNDATION_EXPORT void NLLogNVPError(NSString *format, ...)  NS_FORMAT_FUNCTION(1, 2);
+FOUNDATION_EXPORT void NLLogNVPWarn(NSString *format, ...)   NS_FORMAT_FUNCTION(1, 2);
+FOUNDATION_EXPORT void NLLogNVPInfo(NSString *format, ...)   NS_FORMAT_FUNCTION(1, 2);
+
+@end
diff --git a/NexusVideoPlayer/NLCommonLoggingNVP.m b/NexusVideoPlayer/NLCommonLoggingNVP.m
new file mode 100755
index 0000000..ce067db
--- /dev/null
+++ b/NexusVideoPlayer/NLCommonLoggingNVP.m
@@ -0,0 +1,39 @@
+#import "NLCommonLoggingNVP.h"
+#import <NLCommonLogging/NLCommonLoggingConfig.h>
+#import <NLCommonLogging/NLCommonLogging.h>
+
+@implementation NLCommonLoggingNexusVideoPlayer
+
+NSString *const NLLoggingSourceNexusVideoPlayer = @"NexusVideoPlayer";
+
+void NLLogNVPError(NSString *format, ...) {
+    va_list args;
+    va_start(args, format);
+    
+    NSString *text = [[NSString alloc] initWithFormat:format arguments:args];
+    NLCLogError(NLLoggingSourceNexusVideoPlayer, @"%@", text);
+    
+    va_end(args);
+}
+
+void NLLogNVPWarn(NSString *format, ...) {
+    va_list args;
+    va_start(args, format);
+    
+    NSString *text = [[NSString alloc] initWithFormat:format arguments:args];
+    NLCLogWarn(NLLoggingSourceNexusVideoPlayer, @"%@", text);
+    
+    va_end(args);
+}
+
+void NLLogNVPInfo(NSString *format, ...) {
+    va_list args;
+    va_start(args, format);
+    
+    NSString *text = [[NSString alloc] initWithFormat:format arguments:args];
+    NLCLogInfo(NLLoggingSourceNexusVideoPlayer, @"%@", text);
+    
+    va_end(args);
+}
+
+@end
diff --git a/NexusVideoPlayer/NSCondition+UnlockAfter.h b/NexusVideoPlayer/NSCondition+UnlockAfter.h
new file mode 100644
index 0000000..a5df2fa
--- /dev/null
+++ b/NexusVideoPlayer/NSCondition+UnlockAfter.h
@@ -0,0 +1,14 @@
+//
+//  NSCondition+UnlockAfter.h
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 1/23/14.
+//  Copyright (c) 2014 Dropcam. All rights reserved.
+//
+
+@interface NSCondition (UnlockAfter)
+
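+// Acquires the receiver's lock, runs the block, and always unlocks afterwards,
+// so callers cannot leave the condition locked on an early return.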
+- (void)unlockAfter:(void (^)(void))block;
+- (id)unlockAfterReturning:(id (^)(void))block;
+
+@end
diff --git a/NexusVideoPlayer/NSCondition+UnlockAfter.m b/NexusVideoPlayer/NSCondition+UnlockAfter.m
new file mode 100644
index 0000000..b568118
--- /dev/null
+++ b/NexusVideoPlayer/NSCondition+UnlockAfter.m
@@ -0,0 +1,27 @@
+//
+//  NSCondition+UnlockAfter.m
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 1/23/14.
+//  Copyright (c) 2014 Dropcam. All rights reserved.
+//
+
+#import "NSCondition+UnlockAfter.h"
+
+@implementation NSCondition (UnlockAfter)
+
+- (void)unlockAfter:(void (^)(void))block {
+  [self lock];
+  block();
+  [self unlock];
+}
+
+- (id)unlockAfterReturning:(id (^)(void))block {
+  id retval = nil;
+  [self lock];
+  retval = block();
+  [self unlock];
+  return retval;
+}
+
+@end
diff --git a/NexusVideoPlayer/NexusTalkConnection.h b/NexusVideoPlayer/NexusTalkConnection.h
new file mode 100644
index 0000000..42e0e31
--- /dev/null
+++ b/NexusVideoPlayer/NexusTalkConnection.h
@@ -0,0 +1,51 @@
+//
+//  NexusTalkConnection.h
+//  Dropcam
+//
+//  Created by Loren Kirkby on 8/17/11.
+//  Copyright 2011 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "AsyncSocket.h"
+#import <ProtocolBuffers/Nexustalk.pb.h>
+
+#define NEXUSTALK_PORT 1443
+
+@protocol NexusTalkConnectionDelegate;
+
+@interface NexusTalkConnection : NSObject
+
+- (instancetype)initWithHost:(NSString*)server
+                  cameraUuid:(NSString*)uuid
+            requireConnected:(BOOL)requireConnected
+                sessionToken:(NSString*)sessionToken
+                   userAgent:(NSString *)userAgent;
+
+- (void)connectWithCompletion:(void (^)(void))completionHandler;
+- (void)connect;
+- (void)close;
+// NexusTalk
+- (void)sendAudioPayload:(NSData*)payload sessionId:(uint32_t)sessionId codecType:(NSNumber*)codecType sampleRate:(NSNumber*)sampleRate;
+- (void)sendStartPlayback:(AVProfile)profile playbackSessionId:(uint32_t)playbackSessionId startTime:(NSNumber*)startTime;
+- (void)sendStopPlayback:(uint32_t)playbackSessionId;
+
+@property (nonatomic, weak) id <NexusTalkConnectionDelegate> delegate;
+@property (nonatomic, readonly) BOOL isConnected;
+@property (nonatomic, copy, readonly) NSString *uuid;
+@property (nonatomic, copy, readonly) NSString *sessionToken;
+@property (nonatomic, copy, readonly) NSString *serverHostname;
+@property (nonatomic, copy, readonly) NSString *userAgent;
+@end
+
+@protocol NexusTalkConnectionDelegate <NSObject>
+// NexusTalk Messages
+- (void)connection:(NexusTalkConnection*)connection didEstablishConnection:(NSDictionary*)serverParams;
+- (void)connection:(NexusTalkConnection*)connection didDisconnectWithError:(NSError*)error;
+- (void)connection:(NexusTalkConnection*)connection didReceiveErrorCode:(ErrorCode)code andMessage:(NSString*)message;
+- (void)connection:(NexusTalkConnection*)connection didReceivePlaybackBegin:(PlaybackBegin*)message;
+- (void)connection:(NexusTalkConnection*)connection didReceivePlaybackPacket:(PlaybackPacket*)packet;
+- (void)connection:(NexusTalkConnection*)connection didReceivePlaybackEnd:(PlaybackEnd*)message;
+- (void)connection:(NexusTalkConnection *)connection didReceiveRedirect:(Redirect *)redirectMessage;
+
+@end
diff --git a/NexusVideoPlayer/NexusTalkConnection.m b/NexusVideoPlayer/NexusTalkConnection.m
new file mode 100644
index 0000000..213b299
--- /dev/null
+++ b/NexusVideoPlayer/NexusTalkConnection.m
@@ -0,0 +1,426 @@
+//
+//  NexusTalkConnection.m
+//  Dropcam
+//
+//  Created by Loren Kirkby on 8/17/11.
+//  Copyright 2011 Dropcam. All rights reserved.
+//
+
+#import "NexusTalkConnection.h"
+#import "NexusTalkTimerTarget.h"
+#import "DebugLog.h"
+#import "NLCommonLoggingNVP.h"
+
+struct PackedShortHeader {
+  uint8_t type;
+  uint16_t len;
+} __attribute__((packed));
+
+struct PackedLongHeader {
+  uint8_t type;
+  uint32_t len;
+} __attribute__((packed));
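+
+// Wire framing implied by the structs above: every NexusTalk packet is a
+// 1-byte type followed by a big-endian payload length (2 bytes for most
+// packet types, 4 bytes for LongPlaybackPacket), then the payload itself.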
+
+static const int header_tag = 1;
+static const int payload_tag = 2;
+
+static const NSTimeInterval PingTimerInterval = 20.0;
+static const NSTimeInterval NexusTalkTimeout = 40.0f;
+static const UInt16 NexusPort = 1443;
+
+@interface NexusTalkConnection() <NexusTalkTimerTargetProtocol> {
+  dispatch_queue_t _procQueue;
+}
+@property (strong, nonatomic) AsyncSocket *socket;
+@property (nonatomic, readwrite) BOOL isConnected; 
+@property (copy, nonatomic) NSString *uuid;
+@property (copy, nonatomic) NSString *sessionToken;
+@property (copy, nonatomic) NSString *serverHostname;
+@property (assign, nonatomic) BOOL requireConnected;
+@property (assign, nonatomic) PacketType lastRecvType;
+@property (copy, nonatomic) void (^didConnectCompletionHandler)(void);
+@property (strong, nonatomic) NSTimer *pingTimer;
+@property (strong, nonatomic) NSString *userAgent;
+@end
+
+@implementation NexusTalkConnection {
+  NSMutableData *headerBuffer;
+}
+
++ (int)getPacketHeaderSize:(PacketType)type {
+  int headerSize;
+  
+  switch (type) {
+    case PacketTypePacketTypeLongPlaybackPacket:
+      headerSize = 4;
+      break;
+      
+    default:
+      headerSize = 2;
+      break;
+  }
+  
+  return headerSize;
+}
+
+- (instancetype)initWithHost:(NSString*)server
+                  cameraUuid:(NSString*)uuid
+            requireConnected:(BOOL)requireConnected
+                sessionToken:(NSString*)sessionToken
+                   userAgent:(NSString *)userAgent
+{
+  if ((self = [super init])) {
+    self.uuid = uuid;
+    self.serverHostname = server;
+    self.sessionToken = sessionToken;
+    self.requireConnected = requireConnected;
+    self.userAgent = userAgent;
+    headerBuffer = [[NSMutableData alloc] init];
+    _isConnected = NO;
+    _procQueue = dispatch_queue_create("NexusTalkConnection Queue", DISPATCH_QUEUE_SERIAL);
+  }
+  
+  return self;
+}
+
+- (void)dealloc {
+  [self.pingTimer invalidate];
+}
+
+- (void)connectWithCompletion:(void (^)(void))completionHandler {
+  self.didConnectCompletionHandler = completionHandler;
+  [self connect];
+}
+
+- (void)connect {
+  dispatch_async(dispatch_get_main_queue(), ^{
+    self.socket = [[AsyncSocket alloc] initWithDelegate:self];
+    
+    // Common run loop modes; this allows us to receive data while touch event tracking.
+    // see: http://stackoverflow.com/a/7223765
+    [self.socket setRunLoopModes:[NSArray arrayWithObject:NSRunLoopCommonModes]];
+    
+    NSError *err = nil;
+    if(![self.socket connectToHost:self.serverHostname
+                            onPort:NexusPort
+                       withTimeout:NexusTalkTimeout error:&err]) {
+      NLLogNVPError(@"connectToHost Error: %@", err);
+      [self onSocket:nil willDisconnectWithError:err];
+      [self onSocketDidDisconnect:nil];
+    }
+  });
+}
+
+- (void)close {
+  __weak typeof(self) weakSelf = self;
+  dispatch_async(dispatch_get_main_queue(), ^{
+    if (weakSelf.socket) {
+      NLLogNVPInfo(@"Closing NexusTalk connection");
+      [weakSelf.socket disconnect];
+      weakSelf.isConnected = NO;
+      [weakSelf.socket setDelegate:nil];
+      weakSelf.socket = nil;
+    }
+  });
+}
+
+- (void)readNextPacket 
+{
+  [headerBuffer setLength:0];
+  [self.socket readDataToLength:3 withTimeout:NexusTalkTimeout buffer:headerBuffer bufferOffset:0 tag:header_tag];
+}
+
+- (void)sendMessage:(PacketType)type message:(PBGeneratedMessage*)msg 
+{
+  NLLogNVPInfo(@"Sending Nexustalk message of type %d", type);
+  NSData *payloadData = [msg data];
+  NSMutableData *headerData = nil;
+  
+  if ([NexusTalkConnection getPacketHeaderSize:type] == 2) {
+    struct PackedShortHeader hdr = {
+      .type = type,
+      .len = htons([payloadData length])
+    };
+    
+    headerData = [[NSData dataWithBytes:&hdr length:sizeof(hdr)] mutableCopy];
+  }
+  else {
+    struct PackedLongHeader hdr = {
+      .type = type,
+      .len = htonl([payloadData length])
+    };
+    
+    headerData = [[NSData dataWithBytes:&hdr length:sizeof(hdr)] mutableCopy];
+  }
+   __weak typeof(self) weakSelf = self;
+  dispatch_async(dispatch_get_main_queue(), ^{
+    [weakSelf.socket writeData:headerData withTimeout:NexusTalkTimeout tag:header_tag];
+    [weakSelf.socket writeData:payloadData withTimeout:NexusTalkTimeout tag:payload_tag];
+  });
+}
+
+
+- (void)sendAudioPayload:(NSData*)payload sessionId:(uint32_t)sessionId codecType:(NSNumber*)codecType sampleRate:(NSNumber*)sampleRate {
+  AudioPayloadBuilder* audioBuilder = [[[AudioPayload builder] 
+                                         setPayload:payload]
+                                        setSessionId:sessionId];
+  
+  if (codecType) {
+    [audioBuilder setCodec:[codecType intValue]];
+  }
+  
+  if (sampleRate) {
+    [audioBuilder setSampleRate:[sampleRate intValue]];
+  }
+  
+  [self sendMessage:PacketTypePacketTypeAudioPayload message:[audioBuilder build]];
+}
+
+- (void)sendStartPlayback:(AVProfile)profile playbackSessionId:(uint32_t)playbackSessionId startTime:(NSNumber*)startTime {
+  StartPlaybackBuilder *startPlaybackBuilder = [[[StartPlayback builder]
+                                                 setSessionId:playbackSessionId]
+                                                setProfile:profile];
+  NSMutableArray *additionalAVProfiles = [[NSMutableArray alloc] initWithArray:@[@(AVProfileAudioAac), @(AVProfileAudioSpeex), @(AVProfileAudioOpus), @(profile)]];
+  NSArray *additionalVideoProfiles = nil;
+  if (profile == AVProfileVideoH2642MbitL40) {
+    additionalVideoProfiles = @[@(AVProfileVideoH264100KbitL30), @(AVProfileVideoH264530KbitL31)];
+  } else if (profile == AVProfileVideoH264530KbitL31) {
+    additionalVideoProfiles = @[@(AVProfileVideoH264100KbitL30)];
+  }
+  if (additionalVideoProfiles != nil) {
+    [additionalAVProfiles addObjectsFromArray:additionalVideoProfiles];
+  }
+  [startPlaybackBuilder setOtherProfilesArray:additionalAVProfiles];
+  if (startTime) {
+    [startPlaybackBuilder setStartTime:[startTime doubleValue]];
+  }
+  [self sendMessage:PacketTypePacketTypeStartPlayback message:[startPlaybackBuilder build]];
+}
+
+- (void)sendStopPlayback:(uint32_t)playbackSessionId {
+  StopPlaybackBuilder *stopPlaybackBuilder = [[StopPlayback builder]
+                                              setSessionId:playbackSessionId];
+  
+  [self sendMessage:PacketTypePacketTypeStopPlayback message:[stopPlaybackBuilder build]];
+}
+
+- (void)handleNTMessage:(PacketType)type message:(PBGeneratedMessage *)msg {
+  switch (type) {
+      case PacketTypePacketTypeOk: {
+          NLLogNVPInfo(@"NexusTalk successfully initiated");
+          
+          if ([self.delegate respondsToSelector:@selector(connection:didEstablishConnection:)]) {
+            [self.delegate connection:self didEstablishConnection:nil];
+          }
+          
+          if (self.didConnectCompletionHandler) {
+            self.didConnectCompletionHandler();
+            self.didConnectCompletionHandler = nil;
+          }
+          if (self.pingTimer == nil) {
+              NexusTalkTimerTarget *timerTarget = [[NexusTalkTimerTarget alloc] init];
+              timerTarget.actualTarget = self;
+              NSTimer *pingTimer = [NSTimer timerWithTimeInterval:PingTimerInterval
+                                                     target:timerTarget
+                                                   selector:@selector(timerFired:)
+                                                   userInfo:nil
+                                                    repeats:YES];
+              
+              [[NSRunLoop currentRunLoop] addTimer:pingTimer forMode:NSRunLoopCommonModes];
+              self.pingTimer = pingTimer;
+          }
+          break;
+      }
+      
+    case PacketTypePacketTypeError: {
+      Error *errorMsg = (Error*)msg;
+      NLLogNVPError(@"NexusTalk error: %@ (%d)", [errorMsg message], [errorMsg code]);
+      if ([self.delegate respondsToSelector:@selector(connection:didReceiveErrorCode:andMessage:)]) {
+        [self.delegate connection:self didReceiveErrorCode:[errorMsg code] andMessage:[errorMsg message]];
+      }
+      break;
+    }
+      
+    case PacketTypePacketTypePlaybackBegin: {
+      PlaybackBegin *playbackMsg = (PlaybackBegin*)msg;
+      if ([self.delegate respondsToSelector:@selector(connection:didReceivePlaybackBegin:)]) {
+        [self.delegate connection:self didReceivePlaybackBegin:playbackMsg];
+      }
+      break;
+    }
+      
+    case PacketTypePacketTypePlaybackPacket: {
+      PlaybackPacket *packet = (PlaybackPacket*)msg;
+      if ([self.delegate respondsToSelector:@selector(connection:didReceivePlaybackPacket:)]) {
+        [self.delegate connection:self didReceivePlaybackPacket:packet];
+      }
+      break;
+    }
+      
+    case PacketTypePacketTypeLongPlaybackPacket: {
+      PlaybackPacket *packet = (PlaybackPacket*)msg;
+      if ([self.delegate respondsToSelector:@selector(connection:didReceivePlaybackPacket:)]) {
+        [self.delegate connection:self didReceivePlaybackPacket:packet];
+      }
+      break;
+    }
+      
+    case PacketTypePacketTypePlaybackEnd: {
+      PlaybackEnd *playbackMsg = (PlaybackEnd*)msg;
+      if ([self.delegate respondsToSelector:@selector(connection:didReceivePlaybackEnd:)]) {
+        [self.delegate connection:self didReceivePlaybackEnd:playbackMsg];
+      }
+      break;
+    }
+    case PacketTypePacketTypeRedirect: {
+      NLLogNVPInfo(@"Redirecting");
+      Redirect *redirectMessage = (Redirect *)msg;
+      if ([self.delegate respondsToSelector:@selector(connection:didReceiveRedirect:)]) {
+        [self.delegate connection:self didReceiveRedirect:redirectMessage];
+      }
+      break; // without this, redirects fall through to the "unknown message" warning
+    }
+      
+    default:
+      NLLogNVPWarn(@"Received unknown Nexustalk message type: %d", type);
+      break;
+  }
+}
+
+- (void)parseNTMessage:(PacketType)type data:(NSData*)data {
+  dispatch_async(_procQueue, ^{
+    PBGeneratedMessage *msg = nil;
+    
+    @try {
+      switch (type) {
+        case PacketTypePacketTypeError:
+          msg = [Error parseFromData:data];
+          break;
+          
+        case PacketTypePacketTypePlaybackBegin:
+          msg = [PlaybackBegin parseFromData:data];
+          break;
+          
+        case PacketTypePacketTypeRedirect:
+          msg = [Redirect parseFromData:data];
+          break;
+          
+        case PacketTypePacketTypeLongPlaybackPacket:
+        case PacketTypePacketTypePlaybackPacket:
+          msg = [PlaybackPacket parseFromData:data];
+          break;
+          
+        case PacketTypePacketTypePlaybackEnd:
+          msg = [PlaybackEnd parseFromData:data];
+          break;
+          
+        default:
+          break;
+      }
+    }
+    @catch (NSException *exception) {
+      NLLogNVPError(@"PacketTypePacketTypeError parse failure: %@, %@", exception.name, exception.reason);
+    }
+    
+      __weak typeof(self) weakSelf = self;
+    dispatch_async(dispatch_get_main_queue(), ^{
+      [weakSelf handleNTMessage:type message:msg];
+    });
+  });
+}
+
+#pragma mark AsyncSocket delegate methods
+
+- (void)onSocket:(AsyncSocket *)sock willDisconnectWithError:(NSError *)err {
+  NLLogNVPError(@"NexusTalk connection failed: %@", err);
+}
+
+- (void)onSocketDidDisconnect:(AsyncSocket *)sock {
+  dispatch_async(dispatch_get_main_queue(), ^{
+    [self.pingTimer invalidate];
+    self.pingTimer = nil; // allow a fresh ping timer if the connection is reused
+    NLLogNVPInfo(@"NexusTalk connection disconnected.");
+    self.isConnected = NO;
+        
+    if ([self.delegate respondsToSelector:@selector(connection:didDisconnectWithError:)]) {
+        [self.delegate connection:self didDisconnectWithError:nil];
+    }
+        
+    self.socket.delegate = nil;
+  });
+}
+
+- (void)onSocket:(AsyncSocket *)asyncsock didConnectToHost:(NSString *)host port:(UInt16)port
+{
+  NLLogNVPInfo(@"Connected to host %@:%hu.  Starting TLS.", host, port);
+  self.isConnected = YES;
+  
+  NSMutableDictionary *tlsDict = [NSMutableDictionary dictionaryWithObject:self.serverHostname forKey:(id)kCFStreamSSLPeerName];
+#ifdef NO_SSL_VALIDATE
+  [tlsDict setValue:(id)kCFBooleanTrue forKey:(id)kCFStreamSSLAllowsAnyRoot];
+  [tlsDict setValue:(id)kCFBooleanFalse forKey:(id)kCFStreamSSLValidatesCertificateChain];
+#endif
+  
+  [self.socket startTLS:tlsDict];
+}
+
+- (void)onSocket:(AsyncSocket *)asyncsock didReadData:(NSData *)data withTag:(long)tag 
+{
+  if (tag == header_tag) {
+    // We have at least 3 bytes
+    const struct PackedShortHeader *header = [headerBuffer bytes];
+    self.lastRecvType = header->type;
+    
+    BOOL isLongPacket = ([NexusTalkConnection getPacketHeaderSize:header->type] == 4);
+    if (isLongPacket && [data length] == 3) {
+      // Need to read 2 more bytes
+      [self.socket readDataToLength:2 withTimeout:NexusTalkTimeout buffer:headerBuffer bufferOffset:3 tag:header_tag];
+      return;
+    }
+    
+    int payloadLen;
+    if ([data length] == 3) {
+      payloadLen = ntohs(header->len);
+    }
+    else {
+      const struct PackedLongHeader *longHeader = [headerBuffer bytes];
+      payloadLen = ntohl(longHeader->len);
+    }
+    
+    if (payloadLen > 0) {
+      [self.socket readDataToLength:payloadLen withTimeout:NexusTalkTimeout tag:payload_tag];
+    }
+    else {
+      [self parseNTMessage:self.lastRecvType data:nil];
+      [self readNextPacket];
+    }
+  } else if (tag == payload_tag) {
+    [self parseNTMessage:self.lastRecvType data:data];
+    [self readNextPacket];
+  }
+}
+
+- (void)onSocketDidSecure:(AsyncSocket *)asyncsock {
+  NLLogNVPInfo(@"TLS negotiation successful.  Sending HELLO.");
+  
+  HelloBuilder* helloBuilder = [[[[[Hello builder]
+                                   setProtocolVersion:HelloProtocolVersionVersion3]
+                                  setUuid:self.uuid]
+                                 setRequireConnectedCamera:self.requireConnected]
+                                setIsCamera:NO];
+  
+  if (self.sessionToken != nil) {
+    [helloBuilder setSessionToken:self.sessionToken];
+  }
+  if (self.userAgent != nil) {
+    [helloBuilder setUserAgent:self.userAgent];
+  }
+  Hello *hello = [helloBuilder build];
+  [self sendMessage:PacketTypePacketTypeHello message:hello];
+  [self readNextPacket];
+}
+
+#pragma mark - Private Methods
+
+- (void)timerFired:(NSTimer *)timer {
+    [self sendMessage:PacketTypePacketTypePing message:[[Ping builder] build]];
+}
+
+@end
diff --git a/NexusVideoPlayer/NexusTalkTimerTarget.h b/NexusVideoPlayer/NexusTalkTimerTarget.h
new file mode 100644
index 0000000..5047fba
--- /dev/null
+++ b/NexusVideoPlayer/NexusTalkTimerTarget.h
@@ -0,0 +1,13 @@
+#import <Foundation/Foundation.h>
+
+@protocol NexusTalkTimerTargetProtocol <NSObject>
+- (void)timerFired:(NSTimer *)timer; 
+@end
+
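+// NSTimer retains its target for as long as it is scheduled. This trampoline
+// holds the real target only weakly, so a repeating timer cannot keep its
+// owner alive; the owner simply invalidates the timer in dealloc.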
+@interface NexusTalkTimerTarget : NSObject
+
+@property (nonatomic, weak) id<NexusTalkTimerTargetProtocol> actualTarget;
+
+- (void)timerFired:(NSTimer *)timer;
+
+@end
diff --git a/NexusVideoPlayer/NexusTalkTimerTarget.m b/NexusVideoPlayer/NexusTalkTimerTarget.m
new file mode 100644
index 0000000..407744f
--- /dev/null
+++ b/NexusVideoPlayer/NexusTalkTimerTarget.m
@@ -0,0 +1,14 @@
+#import "NexusTalkTimerTarget.h"
+
+@implementation NexusTalkTimerTarget 
+
+- (void)timerFired:(NSTimer *)timer {
+    if ([self.actualTarget respondsToSelector:@selector(timerFired:)]) {
+        [self.actualTarget performSelector:@selector(timerFired:)
+                                withObject:timer];
+
+    }
+}
+
+@end
+
diff --git a/NexusVideoPlayer/NexusVideoPlayer-Prefix.pch b/NexusVideoPlayer/NexusVideoPlayer-Prefix.pch
new file mode 100644
index 0000000..a28c8cf
--- /dev/null
+++ b/NexusVideoPlayer/NexusVideoPlayer-Prefix.pch
@@ -0,0 +1,12 @@
+//
+// Prefix header for all source files of the 'NexusVideoPlayer' target in the 'NexusVideoPlayer' project
+//
+
+#ifdef __OBJC__
+#import <Foundation/Foundation.h> 
+#import <UIKit/UIKit.h>
+#import <AVFoundation/AVFoundation.h>
+
+#define SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(v)  ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] != NSOrderedAscending)
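+
+// Illustrative (hypothetical check): gate a code path on the OS version, e.g.
+//   if (SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"8.0")) { /* iOS 8+ path */ }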
+
+#endif
diff --git a/NexusVideoPlayer/NexusVideoPlayer.h b/NexusVideoPlayer/NexusVideoPlayer.h
new file mode 100644
index 0000000..0622d82
--- /dev/null
+++ b/NexusVideoPlayer/NexusVideoPlayer.h
@@ -0,0 +1,70 @@
+//
+//  NexusVideoPlayer.h
+//  NexusVideoPlayer
+//
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "Nexustalk.pb.h"
+#import "NexusTalkConnection.h"
+#import "VideoPlayer.h"
+#import "DCAdaptiveJitterBuffer.h"
+
+typedef enum  {
+  kVideoPlayerInit,
+  kVideoPlayerConnecting,
+  kVideoPlayerConnected,
+  kVideoPlayerPlaying,
+  kVideoPlayerBuffering,
+  kVideoPlayerStopped,
+  kVideoPlayerDisconnected
+} NexusVideoPlayerNGState;
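+
+// Typical lifecycle, as driven by NexusVideoPlayer.m:
+//   Init -> Connecting -> Connected -> Buffering <-> Playing -> Stopped,
+// with Disconnected reachable from any state on connection loss or error.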
+
+@protocol NexusPlayerDelegate;
+
+@interface NexusVideoPlayer : NSObject<NexusTalkConnectionDelegate, VideoPlayerDelegate>
++ (AVProfile)bestProfileForDeviceAndConnection;
+- (id)initWithNexusTalkConnection:(NexusTalkConnection *)connection
+                        playAudio:(BOOL)playAudio;
+
+// Setting a new NexusTalk connection makes this video player the connection's
+// delegate. Passing nil removes the delegate and drops the connection entirely.
+// A video player only receives frames while it has a valid connection.
+- (void)setConnection:(NexusTalkConnection *)connection;
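+// Illustrative:
+//   [player setConnection:redirectConnection]; // player becomes the delegate
+//   [player setConnection:nil];                // detach; frames stop arriving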
+- (void)connect;
+- (void)disconnect;
+- (void)startPlayingWithPlaybackProfile:(AVProfile)playbackProfile;
+- (void)startPlayingWithPlaybackProfile:(AVProfile)playbackProfile startTime:(NSDate *)startTime;
+- (void)stopPlaying;
+- (UIImage *)getSnapshot;
+- (void)refreshView;
+- (void)resetRenderView;
+- (void)clearView;
+- (void)setVolume:(CGFloat)volume;
+
+@property (nonatomic, weak) id<NexusPlayerDelegate> delegate;
+@property (nonatomic, readonly) UIView *view;
+@property (nonatomic, readonly) NexusVideoPlayerNGState playerState;
+@property (nonatomic, readonly) BOOL isPlaying;
+@end
+
+@protocol NexusPlayerDelegate <NSObject>
+- (void)playerDidEstablishConnection:(NexusVideoPlayer *)player;
+- (void)playerDidReceiveConnectionError:(NexusVideoPlayer *)player;
+- (void)playerDidDisconnect:(NexusVideoPlayer *)player;
+- (void)playerDidStartBuffering:(NexusVideoPlayer *)player;
+- (void)playerDidStartPlaying:(NexusVideoPlayer *)player;
+- (void)player:(NexusVideoPlayer *)player didShowFrame:(DecodedFrame *)frame sceneChangeData:(NSData *)sceneChangeData timestamp:(NSDate *)timestamp;
+- (void)playerHitPlaybackEnd:(NexusVideoPlayer *)player message:(PlaybackEnd *)message;
+/**
+ It is the responsibility of the NexusPlayerDelegate to clean up any resources
+ associated with the old connection in the case of a redirect.
+ */
+- (void)playerDidSwapConnection:(NexusTalkConnection *)oldConnection
+         withRedirectConnection:(NexusTalkConnection *)newConnection;
+@optional
+- (void)playerDidPruneBuffer:(NexusVideoPlayer *)player;
+@end
+
diff --git a/NexusVideoPlayer/NexusVideoPlayer.m b/NexusVideoPlayer/NexusVideoPlayer.m
new file mode 100644
index 0000000..935450b
--- /dev/null
+++ b/NexusVideoPlayer/NexusVideoPlayer.m
@@ -0,0 +1,622 @@
+//
+//  NexusVideoPlayer.m
+//  NexusVideoPlayer
+//
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "NexusVideoPlayer.h"
+#import "NexusTalkConnection.h"
+#import "GLRenderViewController.h"
+#import "VideoPlayer.h"
+#import "AudioPlayer.h"
+#import "AudioPlayerAAC.h"
+#import "AudioPlayerSpeex.h"
+#import "AudioPlayerOpus.h"
+#import "Device.h"
+#import "DebugLog.h"
+#import "Reachability.h"
+#import "H264.h"
+#import "NexusTalkTimerTarget.h"
+#import "NLCommonLoggingNVP.h"
+
+#define STATE_CHECK_TIMER_INTERVAL 0.1
+#define FPS_LOG_TIMER_INTERVAL 1.0
+#define STATS_LOG_TIMER_INTERVAL 2.0
+#define STALL_TIMEOUT 1.5
+
+//#define LOG_TIMING
+
+typedef enum StreamType {
+  ST_Video,
+  ST_Audio
+} StreamType;
+
+@interface StreamStateNG : NSObject
+@property (strong, nonatomic) PlaybackBeginStream *streamInfo;
+@property (assign, nonatomic) StreamType type;
+@property (assign, nonatomic) int timebase;
+@property (strong, nonatomic) NSDate *startTime;
+@property (strong, nonatomic) NSDate *lastRecvSystemTime;
+@property (assign, nonatomic) int64_t offset;
+@property (assign, nonatomic) int64_t seqNum;
+
+@property (strong, nonatomic) PlayerBase *player;
+@property (strong, nonatomic) DCAdaptiveJitterBuffer *buffer;
+@property (strong, nonatomic) DCTimestampSync *timestampSync;
+
+- (void)startPlaying;
+- (void)stopPlaying;
+@end
+
+@implementation StreamStateNG
+- (void)startPlaying {
+  [self.player start];
+}
+- (void)stopPlaying {
+  [self.player stop];
+}
+@end
+
+@interface NexusVideoPlayer () <NexusTalkTimerTargetProtocol> {
+  NSTimer *_stateCheckTimer;
+  NSTimer *_statsLogTimer;
+  
+  GLRenderViewController *_renderView;
+  int _playbackSessionId;
+  BOOL _playAudio;
+  BOOL _isLive;
+  
+  NSDate *_lastPresentTimestamp;
+}
+
+- (void)checkBuffering;
+- (void)logStats;
+
+@property (readwrite, nonatomic) NexusVideoPlayerNGState playerState;
+@property (strong, nonatomic) StreamStateNG *audioStream;
+@property (strong, nonatomic) StreamStateNG *videoStream;
+@property (strong, nonatomic) NSDate *lastBufferTime;
+@property (strong, nonatomic) NexusTalkConnection *connection;
+
+@end
+
+
+@implementation NexusVideoPlayer
+@synthesize playerState = _playerState;
+
++ (uint32_t) newPlaybackSessionId {
+  static uint32_t playbackSessionIdCounter = 0;
+  return ++playbackSessionIdCounter;
+}
+
++ (AVProfile)bestProfileForDeviceAndConnection {
+  // HD Main profile stream can be used if Device is 5th gen or higher *and*
+  // device is on Wi-Fi.
+  
+  if ([Device getDeviceClass] < DEVICE_CLASS_5TH_GEN) {
+    return AVProfileAvprofileMobile1;
+  }
+  
+  Reachability *wifiReach = [Reachability reachabilityForLocalWiFi];
+  
+  if ([wifiReach currentReachabilityStatus] != ReachableViaWiFi) {
+    return AVProfileAvprofileMobile1;
+  }
+  
+  return AVProfileAvprofileHdMain1;
+}
+
++ (BOOL)hasWifiConnection {
+  Reachability *wifiReach = [Reachability reachabilityForLocalWiFi];
+  return [wifiReach currentReachabilityStatus] == ReachableViaWiFi;
+}
+
++ (AudioPlayer*)audioPlayerWithCodecType:(CodecType)codecType sampleRate:(int)sampleRate packetBuffer:(DCAdaptiveJitterBuffer *)buffer privateData:(NSData *)privateData {
+  AudioPlayer *player = nil;
+  switch (codecType) {
+    case CodecTypeAac:
+      player = [[AudioPlayerAAC alloc] initWithSampleRate:sampleRate packetBuffer:buffer privateData:privateData];
+      break;
+    case CodecTypeSpeex:
+      player = [[AudioPlayerSpeex alloc] initWithSampleRate:sampleRate packetBuffer:buffer privateData:privateData];
+      break;
+    case CodecTypeOpus:
+      player = [[AudioPlayerOpus alloc] initWithSampleRate:sampleRate packetBuffer:buffer privateData:privateData];
+      break;
+
+    default:
+      return nil;
+  }
+
+  return player;
+}
+
+- (instancetype)initWithNexusTalkConnection:(NexusTalkConnection *)connection
+                        playAudio:(BOOL)playAudio {
+  if ((self = [super init])) {
+    _connection = connection;
+    _connection.delegate = self;
+    _playerState = kVideoPlayerInit;
+    _playAudio = playAudio;
+    _renderView = [[GLRenderViewController alloc] initWithNibName:nil bundle:nil];
+    _renderView.view.translatesAutoresizingMaskIntoConstraints = NO;
+  }
+
+  return self;
+}
+
+- (void)setConnection:(NexusTalkConnection *)connection {
+  [self stopPlaying];
+  // Detach from the old connection before swapping it out; messaging the new
+  // connection (possibly nil) afterwards is safe either way.
+  _connection.delegate = nil;
+  _connection = connection;
+  _connection.delegate = self;
+}
+
+- (void)resetRenderView {
+  _renderView = [[GLRenderViewController alloc] initWithNibName:nil bundle:nil];
+  _renderView.view.translatesAutoresizingMaskIntoConstraints = NO;
+}
+
+- (void)dealloc {
+  [self stopPlaying];
+  [_renderView.view removeFromSuperview];
+  _connection.delegate = nil;
+  [_statsLogTimer invalidate];
+  [_stateCheckTimer invalidate];
+}
+
+- (void)reset {
+  if (_playbackSessionId) {
+    NLLogNVPInfo(@"Stopping NexusTalk playback session with session id: %d", _playbackSessionId);
+    [_connection sendStopPlayback:_playbackSessionId];
+    _playbackSessionId = 0;
+  }
+  
+  [_statsLogTimer invalidate];
+  _statsLogTimer = nil;
+  
+  [self.audioStream stopPlaying];
+  self.audioStream = nil;
+  [self.videoStream stopPlaying];
+  self.videoStream = nil;
+}
+
+- (void)logStats {
+  NLLogNVPInfo(@"(%p) Playback state: %d", self, self.playerState);
+  
+  if (_playAudio && self.audioStream != nil) {
+    DCAdaptiveJitterBuffer *audioBuffer = self.audioStream.buffer;
+    NLLogNVPInfo(@"Audio: %d packets, target delay %lld (%f), real delay %lld (%f)",
+                audioBuffer.numPackets,
+                audioBuffer.targetDelay,
+                (float)audioBuffer.targetDelay / self.audioStream.streamInfo.sampleRate,
+                audioBuffer.bufferLength,
+                (float)audioBuffer.bufferLength / self.audioStream.streamInfo.sampleRate);
+  }
+  
+  if (self.videoStream != nil) {
+    DCAdaptiveJitterBuffer *videoBuffer = self.videoStream.buffer;
+    NLLogNVPInfo(@"Video: %d packets, target delay %lld (%f), real delay %lld (%f)",
+                videoBuffer.numPackets,
+                videoBuffer.targetDelay,
+                (float)videoBuffer.targetDelay / self.videoStream.streamInfo.sampleRate,
+                videoBuffer.bufferLength,
+                (float)videoBuffer.bufferLength / self.videoStream.streamInfo.sampleRate);
+  }
+}
+
+- (StreamStateNG *)getMaster {
+  // Audio is always the master if there is an audio stream
+  if (self.audioStream != nil)
+    return self.audioStream;
+  return self.videoStream;
+}
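+
+// Audio makes the natural master clock: its playout rate is pinned to the
+// audio hardware, so video is re-timed against it (via the DCTimestampSync
+// wired up in didReceivePlaybackBegin) rather than the other way around.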
+
+- (BOOL)packetBuffersAreDry {
+  DCAdaptiveJitterBuffer *audioQueue = nil;
+  if (self.audioStream != nil)
+    audioQueue = self.audioStream.buffer;
+  DCAdaptiveJitterBuffer *videoQueue = self.videoStream.buffer;
+  
+  if((audioQueue != nil && [audioQueue numPackets] == 0) ||
+     [videoQueue numPackets] == 0) {
+    return YES;
+  }
+  
+  return NO;
+}
+
+- (BOOL)packetBuffersAreBuffered {
+  DCAdaptiveJitterBuffer *videoQueue = self.videoStream.buffer;
+  if (!(videoQueue && videoQueue.fullyBuffered)) {
+    return NO;
+  }
+  
+  DCAdaptiveJitterBuffer *audioQueue = nil;
+  if (self.audioStream != nil)
+    audioQueue = self.audioStream.buffer;
+  
+  if (audioQueue && !audioQueue.fullyBuffered) {
+    return NO;
+  }
+  
+  return YES;
+}
+
+- (void)checkBuffering {
+  if (self.playerState == kVideoPlayerPlaying && [self packetBuffersAreDry]) {
+    StreamStateNG *master = [self getMaster];
+    // TODO: Why would master be nil here? It can happen when the 30-second
+    // rewind button is tapped repeatedly in the iPhone app.
+    if (!master) {
+      NLLogNVPWarn(@"Master stream nil while accessing packetQueue: %d", self.playerState);
+      return;
+    }
+    
+    const double stallTimeout = MAX(STALL_TIMEOUT, (double)master.buffer.targetDelay / master.timebase);
+    
+    NSDate *now = [NSDate date];
+    NSTimeInterval timeSinceLastPacket = [now timeIntervalSinceDate:master.lastRecvSystemTime];
+    
+    if (timeSinceLastPacket > stallTimeout) {
+      NLLogNVPWarn(@"Primary stream has stalled - buffering");
+      
+      // ran dry - stop playing and go into the buffering state
+      [self.audioStream stopPlaying];
+      [self.videoStream stopPlaying];
+      
+      self.playerState = kVideoPlayerBuffering;
+      if([self.delegate respondsToSelector:@selector(playerDidStartBuffering:)])
+        [self.delegate playerDidStartBuffering:self];
+      
+      self.lastBufferTime = [NSDate date];
+    }
+  }
+}
+
+- (void)connect {
+  if (!_connection.isConnected) {
+    [_connection connect];
+    self.playerState = kVideoPlayerConnecting;
+  } else {
+    self.playerState = kVideoPlayerConnected;
+  }
+}
+
+- (void)disconnect {
+  if (_connection.isConnected) {
+    [_connection close];
+  }
+  self.playerState = kVideoPlayerDisconnected;
+}
+
+- (void)startPlayingWithPlaybackProfile:(AVProfile)playbackProfile {
+    self.connection.delegate = self;
+    [self startPlayingWithPlaybackProfile:playbackProfile
+                                startTime:nil];
+}
+
+- (void)startPlayingWithPlaybackProfile:(AVProfile)playbackProfile startTime:(NSDate *)startTime {
+  [self reset];
+  self.connection.delegate = self;
+  _playbackSessionId = [NexusVideoPlayer newPlaybackSessionId];
+  
+  NSNumber *startTimeInSecs = nil;
+  if (startTime) {
+    _isLive = NO;
+    startTimeInSecs = [NSNumber numberWithDouble:[startTime timeIntervalSince1970]];
+  }
+  else {
+    _isLive = YES;
+  }
+  
+  NLLogNVPInfo(@"Starting NexusTalk playback session with session id: %d, startTime: %@", _playbackSessionId, startTime ? startTime : @"Live");
+  [_connection sendStartPlayback:playbackProfile
+               playbackSessionId:_playbackSessionId
+                       startTime:startTimeInSecs];
+}
+
+- (void)stopPlaying {
+  [self reset];
+  
+  if (self.playerState != kVideoPlayerDisconnected) {
+    self.playerState = kVideoPlayerStopped;
+  }
+}
+
+- (UIImage *)getSnapshot {
+  return [_renderView snapshot];
+}
+
+- (void)refreshView {
+  [_renderView refreshView];
+}
+
+- (void)clearView {
+  [_renderView clearView];
+}
+
+- (UIView*)view {
+  return _renderView.view;
+}
+
+- (BOOL)isPlaying {
+  return self.playerState == kVideoPlayerPlaying || self.playerState == kVideoPlayerBuffering;
+}
+
+- (void)setVolume:(CGFloat)volume {
+  AudioPlayer *ap = (AudioPlayer *) self.audioStream.player;
+  [ap setVolume:volume];
+}
+
+#pragma mark - VideoPlayerDelegate
+
+- (void)presentFrame:(DecodedFrame *)frame {
+  [_renderView presentFrame:frame];
+  NSData *sceneChangeData = [frame sceneChangeData];
+  
+  NSTimeInterval frameTime = (double)frame.PTS / self.videoStream.timebase;
+  NSDate *realTimestamp = [self.videoStream.startTime dateByAddingTimeInterval:frameTime];
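+  // e.g. with a 90 kHz timebase, PTS 180000 lands 2.0 s after the stream's
+  // startTime (illustrative numbers).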
+  
+  if (_lastPresentTimestamp != nil) {
+    NSTimeInterval delta = [realTimestamp timeIntervalSinceDate:_lastPresentTimestamp];
+    if (delta < 0 && delta > -2.0) {
+      realTimestamp = _lastPresentTimestamp; // don't go backwards for small jumps
+    }
+  }
+  _lastPresentTimestamp = realTimestamp;
+  
+  dispatch_async(dispatch_get_main_queue(), ^{
+    if([self.delegate respondsToSelector:@selector(player:didShowFrame:sceneChangeData:timestamp:)])
+      [self.delegate player:self didShowFrame:frame sceneChangeData:sceneChangeData timestamp:realTimestamp];
+  });
+}
+
+#pragma mark - NexusTalkConnectionDelegate
+
+- (void)connection:(NexusTalkConnection*)connection didReceiveErrorCode:(ErrorCode)code andMessage:(NSString*)message {
+  self.playerState = kVideoPlayerDisconnected;
+  if([self.delegate respondsToSelector:@selector(playerDidReceiveConnectionError:)])
+    [self.delegate playerDidReceiveConnectionError:self];
+}
+
+- (void)connection:(NexusTalkConnection*)connection didDisconnectWithError:(NSError*)error {
+  self.playerState = kVideoPlayerDisconnected;
+  if([self.delegate respondsToSelector:@selector(playerDidDisconnect:)])
+    [self.delegate playerDidDisconnect:self];
+}
+
+- (void)connection:(NexusTalkConnection*)connection didEstablishConnection:(NSDictionary*)serverParams {
+  self.playerState = kVideoPlayerConnected;
+  if([self.delegate respondsToSelector:@selector(playerDidEstablishConnection:)])
+    [self.delegate playerDidEstablishConnection:self];
+}
+
+- (void)connection:(NexusTalkConnection*)connection didReceivePlaybackBegin:(PlaybackBegin*)message {
+  if (message.sessionId != _playbackSessionId)
+    return;
+
+  NLLogNVPInfo(@"Got PlaybackBegin");
+
+  for (PlaybackBeginStream *stream in message.channels) {
+    NSDate *startTime = [NSDate dateWithTimeIntervalSince1970:stream.startTime];
+
+    StreamStateNG *streamState = [[StreamStateNG alloc] init];
+    streamState.streamInfo = stream;
+    streamState.startTime = startTime;
+    streamState.offset = 0;
+    streamState.seqNum = 0;
+    streamState.timebase = stream.sampleRate;
+
+    NLLogNVPInfo(@"Setting up stream with codec type %d", stream.codecType);
+
+    BOOL isWifi = [NexusVideoPlayer hasWifiConnection];
+    streamState.buffer = [[DCAdaptiveJitterBuffer alloc] initWithTimebase:stream.sampleRate isLive:_isLive isWifi:isWifi];
+
+    switch (stream.codecType) {
+      case CodecTypeH264: {
+        if (self.videoStream != nil) {
+          NLLogNVPWarn(@"Got duplicate video streams!");
+          return;
+        }
+
+        NLLogNVPInfo(@"Setting up video stream with timebase %d", (unsigned)stream.sampleRate);
+
+        VideoPlayer *videoPlayer = [[VideoPlayer alloc] initWithCodecType:stream.codecType timebase:stream.sampleRate packetBuffer:streamState.buffer privateData:stream.privateData];
+        videoPlayer.delegate = self;
+
+        streamState.type = ST_Video;
+        streamState.player = videoPlayer;
+
+        self.videoStream = streamState;
+      }
+        break;
+
+      case CodecTypeAac:
+      case CodecTypeOpus:
+      case CodecTypeSpeex: {
+        if (self.audioStream != nil) {
+          NLLogNVPWarn(@"Got duplicate audio streams!");
+          return;
+        }
+
+        // If we're not playing audio, we'll pretend the audio stream doesn't exist and skip over it.
+        if (_playAudio) {
+          NSData *privateData = nil;
+          if ([stream.privateData count] > 0) {
+            privateData = [stream privateDataAtIndex:0];
+          }
+
+          NLLogNVPInfo(@"Making audio player");
+          AudioPlayer *audioPlayer = [NexusVideoPlayer audioPlayerWithCodecType:stream.codecType sampleRate:stream.sampleRate packetBuffer:streamState.buffer privateData:privateData];
+
+          streamState.type = ST_Audio;
+          streamState.player = audioPlayer;
+
+          // If audio is AAC, it's from an Axis camera and we should buffer more
+          if (stream.codecType == CodecTypeAac) {
+            streamState.buffer.autoAdjust = NO;
+            streamState.buffer.targetDelay = stream.sampleRate; // 1s
+          }
+
+          self.audioStream = streamState;
+        }
+      }
+        break;
+
+      default:
+        NLLogNVPWarn(@"Unknown media codec in stream: %d", stream.codecType);
+        break;
+    }
+  }
+
+  NLLogNVPInfo(@"Done setting up streams");
+
+  // Setup video stream master (use audio if playing audio)
+  StreamStateNG *masterStream = [self getMaster];
+
+  VideoPlayer *videoPlayer = (VideoPlayer *)self.videoStream.player;
+  if (self.audioStream != nil)
+    [videoPlayer setMaster:masterStream.player];
+
+  self.videoStream.timestampSync = [[DCTimestampSync alloc] initWithMasterTimebase:masterStream.timebase
+                                                                     slaveTimebase:self.videoStream.timebase];
+  [videoPlayer setTimestampSync:self.videoStream.timestampSync];
+
+
+  // Start out buffering - state check will switch to playing when appropriate
+  self.playerState = kVideoPlayerBuffering;
+  if([self.delegate respondsToSelector:@selector(playerDidStartBuffering:)]) {
+    [self.delegate playerDidStartBuffering:self];
+  }
+  self.lastBufferTime = [NSDate date];
+
+  NexusTalkTimerTarget *timerTarget = [[NexusTalkTimerTarget alloc] init];
+  timerTarget.actualTarget = self;
+  _stateCheckTimer = [NSTimer scheduledTimerWithTimeInterval:STATE_CHECK_TIMER_INTERVAL target:timerTarget selector:@selector(timerFired:) userInfo:nil repeats:YES];
+  timerTarget = [[NexusTalkTimerTarget alloc] init];
+  timerTarget.actualTarget = self;
+  _statsLogTimer = [NSTimer scheduledTimerWithTimeInterval:STATS_LOG_TIMER_INTERVAL target:timerTarget selector:@selector(timerFired:) userInfo:nil repeats:YES];
+}
+
+- (void)connection:(NexusTalkConnection*)connection didReceivePlaybackPacket:(PlaybackPacket*)packet {
+  if (packet.sessionId != _playbackSessionId)
+    return;
+  
+  StreamStateNG *stream = nil;
+  if (self.videoStream != nil && packet.channelId == self.videoStream.streamInfo.channelId) {
+    stream = self.videoStream;
+  }
+  else if (self.audioStream != nil && packet.channelId == self.audioStream.streamInfo.channelId) {
+    stream = self.audioStream;
+  }
+  
+  if (stream == nil) {
+    // NLLogNVPWarn(@"Received playback packet for unknown channel %d", packet.channelId);
+    return;
+  }
+  
+  stream.offset += packet.timestampDelta;
+  stream.seqNum += 1; // TODO: udp
+  stream.lastRecvSystemTime = [NSDate date];
+  
+  Packet *to_add = [[Packet alloc] init];
+  to_add.sequenceNumber = stream.seqNum;
+  to_add.PTS = stream.offset;
+  to_add.data = packet.payload;
+  [stream.buffer addPacket:to_add];
+  
+  // Update audio timing if necessary
+  if (stream == self.videoStream && self.audioStream != nil) {
+    int64_t equivPTS = [self.videoStream.timestampSync translateToMasterFrom:to_add.PTS];
+    [self.audioStream.buffer addSlaveTiming:equivPTS];
+  }
+  
+  // Check to see if we can start playing
+  if (self.playerState == kVideoPlayerBuffering) {
+    if ([self packetBuffersAreBuffered]) {
+      // we have enough buffered, start playing
+      DCAdaptiveJitterBuffer *masterBuffer = [self getMaster].buffer;
+      NLLogNVPInfo(@"Finished buffering (%lld, %f, %d) - starting playback",
+                  masterBuffer.bufferLength,
+                  (float)masterBuffer.bufferLength / [self getMaster].timebase,
+                  masterBuffer.numPackets);
+      
+      self.playerState = kVideoPlayerPlaying;
+      if([self.delegate respondsToSelector:@selector(playerDidStartPlaying:)])
+        [self.delegate playerDidStartPlaying:self];
+      
+      // Increase target delay bias
+      if (self.lastBufferTime != nil) {
+        NSTimeInterval interval = -[self.lastBufferTime timeIntervalSinceNow];
+        assert(interval >= 0.0);
+        // Don't add more than 1.5s delay at a time
+        if (interval > 1.5) interval = 1.5;
+        
+        int64_t biasAmount = interval * [self getMaster].timebase;
+        [masterBuffer addBias:biasAmount];
+        NLLogNVPInfo(@"Added (%lld) %f delay after buffering", biasAmount, interval);
+      }
+      
+      [self.audioStream startPlaying];
+      [self.videoStream startPlaying];
+    }
+  }
+}
+
+- (void)connection:(NexusTalkConnection*)connection didReceivePlaybackEnd:(PlaybackEnd*)message {
+  
+  // Hit the end of a recording session. Note that we still may have buffered video, especially in CVR mode, so the player will continue to play until the video stream hits the end of the buffer and stalls.
+  
+  if (message.sessionId != _playbackSessionId)
+    return;
+  
+  _playbackSessionId = 0;
+  
+  if([self.delegate respondsToSelector:@selector(playerHitPlaybackEnd:message:)])
+    [self.delegate playerHitPlaybackEnd:self message:message];
+}
+
+- (void)connection:(NexusTalkConnection *)connection didReceiveRedirect:(Redirect *)redirectMessage {
+    // If no redirect host, log error and return
+    if (!redirectMessage.hasNewHost) {
+        NLLogNVPError(@"NexusTalk error: Redirect packet should have new host but does not");
+        return;
+    }
+    NexusTalkConnection *newConnection = [[NexusTalkConnection alloc] initWithHost:redirectMessage.newHost
+                                                                        cameraUuid:connection.uuid
+                                                                  requireConnected:NO
+                                                                      sessionToken:connection.sessionToken
+                                                                         userAgent:connection.userAgent];
+    
+    // we remove the delegate here so that the teardown process does not interfere with the new 
+    // connection
+    NexusTalkConnection *oldConnection = self.connection;
+    oldConnection.delegate = nil;
+    
+    self.connection = newConnection;
+    self.connection.delegate = self;
+    if ([self.delegate respondsToSelector:@selector(playerDidSwapConnection:withRedirectConnection:)]) {
+        [self.delegate playerDidSwapConnection:oldConnection
+                        withRedirectConnection:self.connection];
+    }
+}
+
+#pragma mark - NexusTalkTimerTargetProtocol
+- (void)timerFired:(NSTimer *)timer {
+  if (timer == _stateCheckTimer) {
+    [self checkBuffering];
+  } else if (timer == _statsLogTimer) {
+    [self logStats];
+  }
+}
+
+@end
diff --git a/NexusVideoPlayer/PlayerBase.h b/NexusVideoPlayer/PlayerBase.h
new file mode 100644
index 0000000..3c8853c
--- /dev/null
+++ b/NexusVideoPlayer/PlayerBase.h
@@ -0,0 +1,21 @@
+//
+//  PlayerBase.h
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/16/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "DCAdaptiveJitterBuffer.h"
+
+@interface PlayerBase : NSObject
+
+- (id)initWithTimebase:(int)timebase jitterBuffer:(DCAdaptiveJitterBuffer*)jitterBuffer;
+- (void)start;
+- (void)stop;
+
+@property (assign, nonatomic) int timebase;
+@property (nonatomic, readonly) int64_t currentTimestamp;
+@property (nonatomic, strong) DCAdaptiveJitterBuffer *jitterBuffer;
+
+@end
diff --git a/NexusVideoPlayer/PlayerBase.m b/NexusVideoPlayer/PlayerBase.m
new file mode 100644
index 0000000..f3e28ae
--- /dev/null
+++ b/NexusVideoPlayer/PlayerBase.m
@@ -0,0 +1,29 @@
+//
+//  PlayerBase.m
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/16/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#import "PlayerBase.h"
+
+@implementation PlayerBase
+
+- (id)initWithTimebase:(int)timebase jitterBuffer:(DCAdaptiveJitterBuffer *)jitterBuffer {
+  if (self = [super init]) {
+    _timebase = timebase;
+    _jitterBuffer = jitterBuffer;
+  }
+  return self;
+}
+
+- (void)start {
+  [self doesNotRecognizeSelector:_cmd];
+}
+
+- (void)stop {
+  [self doesNotRecognizeSelector:_cmd];
+}
+
+@end
diff --git a/NexusVideoPlayer/SWVideoDecoder.h b/NexusVideoPlayer/SWVideoDecoder.h
new file mode 100644
index 0000000..12b5a11
--- /dev/null
+++ b/NexusVideoPlayer/SWVideoDecoder.h
@@ -0,0 +1,19 @@
+//
+//  SWVideoDecoder.h
+//  NexusVideoPlayer
+//
+//  Created by lorenkirkby on 9/19/14.
+//  Copyright (c) 2014 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "VideoDecoderBase.h"
+#import "Nexustalk.pb.h"
+
+@interface SWVideoDecoder : NSObject<VideoDecoder>
+
+- (id)initWithCodecType:(CodecType)codecType privateData:(PBArray*)privateDataArray;
+
+@property (nonatomic, weak) id<VideoDecoderDelegate> delegate;
+
+@end
diff --git a/NexusVideoPlayer/SWVideoDecoder.m b/NexusVideoPlayer/SWVideoDecoder.m
new file mode 100644
index 0000000..5d5a57d
--- /dev/null
+++ b/NexusVideoPlayer/SWVideoDecoder.m
@@ -0,0 +1,46 @@
+//
+//  SWVideoDecoder.m
+//  NexusVideoPlayer
+//
+//  Created by lorenkirkby on 9/19/14.
+//  Copyright (c) 2014 Dropcam. All rights reserved.
+//
+
+#import "SWVideoDecoder.h"
+#import "VideoDecoder.h"
+
+static const uint8_t h264StartCode[] = { 0x00, 0x00, 0x00, 0x01 };
+
+@interface SWVideoDecoder() {
+  VideoDecoder *_decoder;
+}
+@end
+
+@implementation SWVideoDecoder
+
+- (id)initWithCodecType:(CodecType)codecType privateData:(PBArray*)privateDataArray {
+  self = [super init];
+  if (self) {
+    NSMutableData *privateData = [NSMutableData data];
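+    // privateData entries are presumably raw H.264 parameter sets (SPS/PPS);
+    // the software decoder wants Annex-B input, so each entry gets a
+    // 00 00 00 01 start code prepended before being handed over as extradata.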
+    [privateDataArray enumerateObjectsUsingBlock:^(NSData *item, NSUInteger idx, BOOL *stop) {
+        [privateData appendBytes:h264StartCode length:sizeof(h264StartCode)];
+        [privateData appendData:item];
+    }];
+
+    _decoder = [[VideoDecoder alloc] initWithCodec:kVCT_H264 colorSpace:VideoColorSpaceYUV width:0 height:0 privateData:privateData allowChunks:YES];
+  }
+  
+  return self;
+}
+
+- (void)decodeFrame:(NSData*)frameData PTS:(uint64_t)PTS {
+  [_decoder decodeFrame:frameData hasStartCode:NO PTS:PTS];
+  
+  if ([_decoder isFrameReady]) {
+    DecodedFrame *frame = [_decoder getDecodedFrame];
+    [self.delegate onFrameDecoded:frame];
+  }
+}
+
+
+@end
diff --git a/NexusVideoPlayer/Shader.vsh b/NexusVideoPlayer/Shader.vsh
new file mode 100644
index 0000000..1f275ab
--- /dev/null
+++ b/NexusVideoPlayer/Shader.vsh
@@ -0,0 +1,57 @@
+/*
+     File: Shader.vsh
+ Abstract: Vertex shader that passes attributes through to fragment shader.
+  Version: 1.0
+ 
+ Disclaimer: IMPORTANT:  This Apple software is supplied to you by Apple
+ Inc. ("Apple") in consideration of your agreement to the following
+ terms, and your use, installation, modification or redistribution of
+ this Apple software constitutes acceptance of these terms.  If you do
+ not agree with these terms, please do not use, install, modify or
+ redistribute this Apple software.
+ 
+ In consideration of your agreement to abide by the following terms, and
+ subject to these terms, Apple grants you a personal, non-exclusive
+ license, under Apple's copyrights in this original Apple software (the
+ "Apple Software"), to use, reproduce, modify and redistribute the Apple
+ Software, with or without modifications, in source and/or binary forms;
+ provided that if you redistribute the Apple Software in its entirety and
+ without modifications, you must retain this notice and the following
+ text and disclaimers in all such redistributions of the Apple Software.
+ Neither the name, trademarks, service marks or logos of Apple Inc. may
+ be used to endorse or promote products derived from the Apple Software
+ without specific prior written permission from Apple.  Except as
+ expressly stated in this notice, no other rights or licenses, express or
+ implied, are granted by Apple herein, including but not limited to any
+ patent rights that may be infringed by your derivative works or by other
+ works in which the Apple Software may be incorporated.
+ 
+ The Apple Software is provided by Apple on an "AS IS" basis.  APPLE
+ MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
+ THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
+ OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
+ 
+ IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
+ OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
+ MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
+ AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
+ STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+ 
+ Copyright (C) 2012 Apple Inc. All Rights Reserved.
+ 
+ */
+
+attribute vec4 position;
+attribute vec2 texCoord;
+
+varying vec2 texCoordVarying;
+
+void main()
+{
+    gl_Position = position;
+    texCoordVarying = texCoord;
+}
diff --git a/NexusVideoPlayer/Shader_NV12.fsh b/NexusVideoPlayer/Shader_NV12.fsh
new file mode 100644
index 0000000..a63f189
--- /dev/null
+++ b/NexusVideoPlayer/Shader_NV12.fsh
@@ -0,0 +1,30 @@
+/*
+ Copyright (C) 2014 Apple Inc. All Rights Reserved.
+ See LICENSE.txt for this sample’s licensing information
+ 
+ Abstract:
+ 
+ Passthrough shader for displaying CVPixelbuffers
+ 
+ */
+
+varying highp vec2 texCoordVarying;
+precision mediump float;
+
+uniform sampler2D SamplerY;
+uniform sampler2D SamplerUV;
+uniform mat3 colorConversionMatrix;
+
+void main()
+{
+  mediump vec3 yuv;
+  lowp vec3 rgb;
+  
+  // Subtract constants so the start of the video range maps to 0
+  yuv.x = (texture2D(SamplerY, texCoordVarying).r - (16.0/255.0));
+  yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));
+  
+  rgb = colorConversionMatrix * yuv;
+  
+  gl_FragColor = vec4(rgb,1);
+}
\ No newline at end of file
diff --git a/NexusVideoPlayer/Shader_YUV420.fsh b/NexusVideoPlayer/Shader_YUV420.fsh
new file mode 100644
index 0000000..8acdce5
--- /dev/null
+++ b/NexusVideoPlayer/Shader_YUV420.fsh
@@ -0,0 +1,77 @@
+/*
+     File: Shader.fsh
+ Abstract: Fragment shader for converting Y/UV textures to RGB.
+  Version: 1.0
+ 
+ Disclaimer: IMPORTANT:  This Apple software is supplied to you by Apple
+ Inc. ("Apple") in consideration of your agreement to the following
+ terms, and your use, installation, modification or redistribution of
+ this Apple software constitutes acceptance of these terms.  If you do
+ not agree with these terms, please do not use, install, modify or
+ redistribute this Apple software.
+ 
+ In consideration of your agreement to abide by the following terms, and
+ subject to these terms, Apple grants you a personal, non-exclusive
+ license, under Apple's copyrights in this original Apple software (the
+ "Apple Software"), to use, reproduce, modify and redistribute the Apple
+ Software, with or without modifications, in source and/or binary forms;
+ provided that if you redistribute the Apple Software in its entirety and
+ without modifications, you must retain this notice and the following
+ text and disclaimers in all such redistributions of the Apple Software.
+ Neither the name, trademarks, service marks or logos of Apple Inc. may
+ be used to endorse or promote products derived from the Apple Software
+ without specific prior written permission from Apple.  Except as
+ expressly stated in this notice, no other rights or licenses, express or
+ implied, are granted by Apple herein, including but not limited to any
+ patent rights that may be infringed by your derivative works or by other
+ works in which the Apple Software may be incorporated.
+ 
+ The Apple Software is provided by Apple on an "AS IS" basis.  APPLE
+ MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
+ THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
+ OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
+ 
+ IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
+ OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
+ MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
+ AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
+ STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+ 
+ Copyright (C) 2012 Apple Inc. All Rights Reserved.
+ 
+ */
+
+uniform sampler2D SamplerY;
+uniform sampler2D SamplerU;
+uniform sampler2D SamplerV;
+
+varying highp vec2 texCoordVarying;
+
+void main()
+{
+    mediump vec3 yuv;
+    lowp vec3 rgb;
+    
+    yuv.x = texture2D(SamplerY, texCoordVarying).r;
+    yuv.y = texture2D(SamplerU, texCoordVarying).r - 0.5;
+    yuv.z = texture2D(SamplerV, texCoordVarying).r - 0.5;
+    
+    // BT.601, which is the standard for SDTV, is provided as a reference
+    
+    /*rgb = mat3(    1,       1,     1,
+                   0, -.34413, 1.772,
+               1.402, -.71414,     0) * yuv;*/
+     
+    
+    // Using BT.709, which is the standard for HDTV
+    rgb = mat3(      1,       1,      1,
+                     0, -.18732, 1.8556,
+               1.57481, -.46813,      0) * yuv;
+    
+    gl_FragColor = vec4(rgb, 1);
+}
+
diff --git a/NexusVideoPlayer/SpeexStream.h b/NexusVideoPlayer/SpeexStream.h
new file mode 100644
index 0000000..af5a445
--- /dev/null
+++ b/NexusVideoPlayer/SpeexStream.h
@@ -0,0 +1,33 @@
+//
+//  SpeexStream.h
+//  Dropcam
+//
+//  Created by Yohannes Kifle on 25/06/2013.
+//
+//
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+#import "AudioStream.h"
+
+@protocol SpeexStreamDelegate;
+
+@interface SpeexStream : NSObject <AudioStreamDelegate>
+
+@property (weak, nonatomic) id<SpeexStreamDelegate> delegate;
+
+- (id) init;
+- (BOOL)setup;
+- (BOOL)start;
+- (void)stop;
+- (float)getLevel;
+- (void)setGain:(Float32)gain;
+- (BOOL)isRunning;
+
+@end
+
+@protocol SpeexStreamDelegate <NSObject>
+
+- (void)speexEncodedFrame:(char *)frame length:(size_t)length inStartTime:(const AudioTimeStamp *)inStartTime;
+
+@end
diff --git a/NexusVideoPlayer/SpeexStream.m b/NexusVideoPlayer/SpeexStream.m
new file mode 100644
index 0000000..3fd7de0
--- /dev/null
+++ b/NexusVideoPlayer/SpeexStream.m
@@ -0,0 +1,174 @@
+//
+//  SpeexStream.m
+//  Dropcam
+//
+//  Created by Yohannes Kifle on 25/06/2013.
+//
+//
+
+#import "SpeexStream.h"
+#import <speex/speex.h>
+
+static const int sample_rate = 16000;
+static const int frames_per_packet = 5;
+
+@interface SpeexStream () {
+  SpeexBits speex_bits;
+}
+
+@property (strong, nonatomic) AudioStream *audioStream;
+@property (assign, nonatomic) void *speex_enc_state;
+@property (strong, nonatomic) NSMutableData *nextAudio;
+@property (strong, nonatomic) NSNumber *level; // The volume
+
+@property (assign, nonatomic) double abs_diff;
+@property (assign, nonatomic) uint64_t rms_total;
+@property (assign, nonatomic) int32_t val_lpf;
+@property (assign, nonatomic) int32_t val_hpf;
+@property (assign, nonatomic) int32_t val_prev;
+@property (assign, nonatomic) double rms_prev;
+@property (assign, nonatomic) double rms;
+@property (assign, nonatomic) int num_samples_in_total;
+
+@end
+
+@implementation SpeexStream
+
+- (id) init
+{
+  if (self = [super init]) {
+    _audioStream = [[AudioStream alloc] initWithSampleRate:sample_rate];
+    _level = [NSNumber numberWithFloat:1.0];
+    
+    _rms_total = 0;
+    _val_lpf = 0;
+    _val_hpf = 0;
+    _val_prev = 0;
+    _rms_prev = 0;
+    _rms = 100000;
+    _num_samples_in_total = 0;
+  }
+  return self;
+}
+
+- (BOOL)setup {
+  [self stop];
+  
+  self.nextAudio = [[NSMutableData alloc] initWithCapacity:4096];
+  
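+  // speex_wb_mode is Speex's 16 kHz wideband mode, matching sample_rate above.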
+  self.speex_enc_state = speex_encoder_init(&speex_wb_mode);
+  speex_bits_init(&speex_bits);
+  
+  int quality = 6;
+  speex_encoder_ctl(self.speex_enc_state, SPEEX_SET_QUALITY, &quality);
+  
+  int complexity = 3;
+  speex_encoder_ctl(self.speex_enc_state, SPEEX_SET_COMPLEXITY, &complexity);
+  
+  int frameSize;
+  speex_encoder_ctl(self.speex_enc_state, SPEEX_GET_FRAME_SIZE, &frameSize);
+  
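+  // For Speex wideband (16 kHz), SPEEX_GET_FRAME_SIZE reports 320 samples,
+  // so this works out to 320 * 2 * 5 = 3200 bytes (100 ms) per packet.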
+  int bufferSize = frameSize * sizeof(int16_t) * frames_per_packet;
+  self.audioStream.bufferSize = bufferSize;
+  self.audioStream.delegate = self;
+  
+  return [self.audioStream setup];
+}
+
+- (BOOL)start {
+  if (!self.audioStream.isRunning) {
+    return [self.audioStream start];
+  }
+  return YES;
+}
+
+- (void)stop {
+  if ([self.audioStream isRunning]) {
+    [self.audioStream stop];
+  }
+  
+  if (self.speex_enc_state) {
+    speex_encoder_destroy(self.speex_enc_state);
+    speex_bits_destroy(&speex_bits);
+    
+    self.speex_enc_state = NULL;
+  }
+  
+  self.nextAudio = nil;
+}
+
+- (float)getLevel {
+  return self.abs_diff;
+}
+
+- (void)setGain:(Float32)gain {
+  self.level = [NSNumber numberWithFloat:gain];
+}
+
+- (BOOL)isRunning {
+  return self.audioStream.isRunning;
+}
+
+- (void)audioQueueInputCallbackInBuffer:(AudioQueueBufferRef)inBuffer
+                            inStartTime:(const AudioTimeStamp *)inStartTime
+             inNumberPacketDescriptions:(UInt32)inNumberPacketDescriptions
+                          inPacketDescs:(const AudioStreamPacketDescription *)inPacketDescs
+{
+  [self.nextAudio appendBytes:inBuffer->mAudioData length:inBuffer->mAudioDataByteSize];
+  
+  int frameSize;
+  speex_encoder_ctl(self.speex_enc_state, SPEEX_GET_FRAME_SIZE, &frameSize);
+  
+  int frameBytes = frameSize * sizeof(int16_t);
+  int packetSize = frameBytes * frames_per_packet;
+  
+  while([self.nextAudio length] >= packetSize) {
+    void *framePtr = [self.nextAudio mutableBytes];
+    
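+    // Crude DC-removal high-pass (sample minus a running two-tap average),
+    // followed by an RMS accumulated over roughly one second of samples;
+    // getLevel reports |RMS - previous RMS| as an activity measure.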
+    int16_t *samples = (int16_t*)framePtr;
+    // Meter exactly the samples consumed in this loop iteration; sizing this
+    // from inBuffer could read past the bytes remaining in nextAudio.
+    int num_samples = packetSize / sizeof(int16_t);
+    
+    for (int i = 0; i < num_samples; i++) {
+      int16_t val = samples[i];
+      self.val_hpf = val - self.val_lpf;
+      self.val_lpf = (self.val_prev + val) / 2;
+      self.val_prev = val;
+      // Widen before squaring; val_hpf * val_hpf can overflow 32 bits.
+      self.rms_total += (int64_t)self.val_hpf * self.val_hpf;
+    }
+    
+    self.num_samples_in_total += num_samples;
+    
+    if (self.num_samples_in_total > sample_rate) {
+      self.rms_prev = self.rms;
+      self.rms = sqrt((double)self.rms_total / self.num_samples_in_total);
+      
+      //compute audio value that gets sent to nexus
+      self.abs_diff = fabs(self.rms - self.rms_prev);
+      
+      self.num_samples_in_total = 0;
+      self.rms_total = 0;
+    }
+    
+    for (int i = 0; i < frames_per_packet; i++) {
+//      int16_t *fptr = (int16_t *)framePtr;
+//      for (int j = 0; j < frameSize; j++, fptr++) {
+//        *fptr = (int16_t)(*fptr * [_level floatValue]);
+//      }
+      speex_encode_int(self.speex_enc_state, (spx_int16_t *)framePtr, &speex_bits);
+      framePtr = (char *)framePtr + frameBytes;
+    }
+    
+    speex_bits_insert_terminator(&speex_bits);
+    
+    char encBuf[4096];
+    int numBytes = speex_bits_write_whole_bytes(&speex_bits, (char*)encBuf, sizeof(encBuf));
+    if ([self.delegate respondsToSelector:@selector(speexEncodedFrame:length:inStartTime:)]) {
+      [self.delegate speexEncodedFrame:encBuf length:numBytes inStartTime:inStartTime];
+    }
+    speex_bits_reset(&speex_bits);
+    
+    [self.nextAudio replaceBytesInRange:NSMakeRange(0, packetSize) withBytes:NULL length:0];
+  }
+}
+
+@end
diff --git a/NexusVideoPlayer/VideoDecoderBase.h b/NexusVideoPlayer/VideoDecoderBase.h
new file mode 100644
index 0000000..8415c91
--- /dev/null
+++ b/NexusVideoPlayer/VideoDecoderBase.h
@@ -0,0 +1,27 @@
+//
+//  VideoDecoderBase.h
+//  NexusVideoPlayer
+//
+//  Created by lorenkirkby on 9/19/14.
+//  Copyright (c) 2014 Dropcam. All rights reserved.
+//
+
+#ifndef NexusVideoPlayer_VideoDecoderBase_h
+#define NexusVideoPlayer_VideoDecoderBase_h
+
+#import "VideoDecoder.h"
+#import "DecodedFrame.h"
+
+@protocol VideoDecoderDelegate <NSObject>
+
+- (void)onFrameDecoded:(DecodedFrame*)decodedFrame;
+
+@end
+
+@protocol VideoDecoder <NSObject>
+
+- (void)decodeFrame:(NSData*)frameData PTS:(int64_t)PTS;
+
+@end
+
+#endif
diff --git a/NexusVideoPlayer/VideoPlayer.h b/NexusVideoPlayer/VideoPlayer.h
new file mode 100644
index 0000000..ddcfbe8
--- /dev/null
+++ b/NexusVideoPlayer/VideoPlayer.h
@@ -0,0 +1,28 @@
+//
+//  VideoPlayer.h
+//  NexusVideoPlayer
+//
+//  Created by Loren Kirkby on 10/6/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+#import "Nexustalk.pb.h"
+#import "DCAdaptiveJitterBuffer.h"
+#import "DCTimestampSync.h"
+#import "DecodedFrame.h"
+#import "PlayerBase.h"
+#import "VideoDecoderBase.h"
+
+@protocol VideoPlayerDelegate <NSObject>
+- (void)presentFrame:(DecodedFrame *)frame;
+@end
+
+@interface VideoPlayer : PlayerBase<VideoDecoderDelegate>
+
+- (id)initWithCodecType:(CodecType)codecType timebase:(int)timebase packetBuffer:(DCAdaptiveJitterBuffer*)buffer privateData:(PBArray*)privateDataArray;
+
+@property (nonatomic, weak) id<VideoPlayerDelegate> delegate;
+@property (nonatomic, weak) PlayerBase *master;
+@property (nonatomic, strong) DCTimestampSync *timestampSync;
+@end
diff --git a/NexusVideoPlayer/VideoPlayer.m b/NexusVideoPlayer/VideoPlayer.m
new file mode 100644
index 0000000..d3c61c0
--- /dev/null
+++ b/NexusVideoPlayer/VideoPlayer.m
@@ -0,0 +1,276 @@
+//
+//  VideoPlayer.m
+//  NexusVideoPlayer
+//
+//  Created by Loren Kirkby on 10/6/12.
+//  Copyright (c) 2012 Dropcam. All rights reserved.
+//
+
+#import "VideoPlayer.h"
+#import "SWVideoDecoder.h"
+#import "HWVideoDecoder.h"
+#import "H264.h"
+#import "DebugLog.h"
+
+#include "JitterTunables.h"
+#import  "NLCommonLoggingNVP.h"
+//#define LOG_TIMING // (Note: slows everything down quite a bit)
+
+#define FRAME_DROP_THRESHOLD_BFRAMES -0.5
+#define FRAME_DROP_THRESHOLD_NOBFRAMES -0.033
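+// Streams containing B-frames decode out of display order, so frames are
+// allowed to trail the clock by up to half a second before being dropped;
+// otherwise the budget is roughly one 30 fps frame interval.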
+
+#define SHUTDOWN_CONDITION_RUNNING 0
+#define SHUTDOWN_CONDITION_SHUTTING_DOWN 1
+#define SHUTDOWN_CONDITION_FINISHED 2
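+// Shutdown handshake: -stop moves the condition lock to SHUTTING_DOWN (which
+// also wakes waitForPresentationTime:), and the playback thread acknowledges
+// by setting FINISHED on its way out.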
+
+@interface VideoPlayer()
+- (void)videoPlaybackThreadEntry;
+
+@property (assign, nonatomic) int64_t lastPacketPTS;
+@property (strong, nonatomic) NSDate *lastPacketSystemTime;
+@property (assign, nonatomic) float playbackRate;
+@property (assign, nonatomic) float dropThreshold;
+
+@property (strong, nonatomic) id<VideoDecoder> decoder;
+@property (strong, nonatomic) NSThread *videoPlaybackThread;
+@property (strong, nonatomic) NSConditionLock *shutdownConditionLock;
+@property (strong, nonatomic) PBArray *privateDataArray;
+
+@property (weak, nonatomic) DCAdaptiveJitterBuffer *jitterBuffer;
+
+@property (assign, nonatomic) int consecutiveDroppedFrames;
+@property (assign, nonatomic) int totalDroppedFrames;
+@property (assign, nonatomic) BOOL forcePresent;
+@end
+
+@implementation VideoPlayer
+
+- (id)initWithCodecType:(CodecType)codecType timebase:(int)timebase packetBuffer:(DCAdaptiveJitterBuffer*)buffer privateData:(PBArray*)privateDataArray {
+  self = [super initWithTimebase:timebase jitterBuffer:buffer];
+  if (self) {
+    _lastPacketPTS = INT64_MIN;
+    _lastPacketSystemTime = nil;
+    _playbackRate = 1.0;
+    _dropThreshold = FRAME_DROP_THRESHOLD_NOBFRAMES;
+    
+    _privateDataArray = privateDataArray;
+    _shutdownConditionLock = [[NSConditionLock alloc] init];
+    _master = nil;
+    
+    _consecutiveDroppedFrames = 0;
+    _totalDroppedFrames = 0;
+    _forcePresent = NO;
+  }
+  
+  return self;
+}
+
+- (id<VideoDecoder>)decoder {
+  if (_decoder == nil) {
+    if ([HWVideoDecoder isSupported]) {
+      HWVideoDecoder *hwvd = [[HWVideoDecoder alloc] initWithCodecType:CodecTypeH264 privateData:self.privateDataArray timebase:self.timebase];
+      hwvd.delegate = self;
+      _decoder = hwvd;
+    }
+    else {
+      SWVideoDecoder *swvd = [[SWVideoDecoder alloc] initWithCodecType:CodecTypeH264 privateData:self.privateDataArray];
+      swvd.delegate = self;
+      _decoder = swvd;
+    }
+  }
+  
+  return _decoder;
+}
+
+- (void)start {
+  NSAssert(self.videoPlaybackThread == nil, @"Starting video player when already started.");
+  
+  self.lastPacketPTS = INT64_MIN;
+  self.lastPacketSystemTime = nil;
+  self.playbackRate = 1.0f;
+  self.dropThreshold = FRAME_DROP_THRESHOLD_NOBFRAMES;
+  
+  self.consecutiveDroppedFrames = 0;
+  self.totalDroppedFrames = 0;
+  
+  [self.shutdownConditionLock lock];
+  [self.shutdownConditionLock unlockWithCondition:SHUTDOWN_CONDITION_RUNNING];
+  
+  self.videoPlaybackThread = [[NSThread alloc] initWithTarget:self selector:@selector(videoPlaybackThreadEntry) object:nil];
+  [self.videoPlaybackThread start];
+}
+
+- (void)stop {
+  if (self.videoPlaybackThread == nil) {
+    NLLogNVPWarn(@"Stopping video player when already stopped.");
+    return;
+  }
+  
+  [self.shutdownConditionLock lock];
+  [self.shutdownConditionLock unlockWithCondition:SHUTDOWN_CONDITION_SHUTTING_DOWN];
+  
+  [self.videoPlaybackThread cancel];
+  // Wake up the video player thread so it doesn't hang
+  [self.jitterBuffer cancelWaiters];
+  
+  [self.shutdownConditionLock lockWhenCondition:SHUTDOWN_CONDITION_FINISHED];
+  [self.shutdownConditionLock unlock];
+  
+  self.videoPlaybackThread = nil;
+}
+
+- (BOOL)waitForPresentationTime:(NSDate *)presentationTime {
+  BOOL interrupted = NO;
+  if ([self.shutdownConditionLock lockWhenCondition:SHUTDOWN_CONDITION_SHUTTING_DOWN beforeDate:presentationTime]) {
+    [self.shutdownConditionLock unlock];
+    interrupted = YES;
+  }
+  return !interrupted;
+}
+
+- (NSTimeInterval)deltaForPTS:(int64_t)PTS {
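+  // Positive result: the frame's ideal presentation time is still in the
+  // future (wait); negative: it is already late (candidate for dropping).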
+  // Compute correct time to display this frame
+  int64_t timestamp = self.currentTimestamp;
+  if (timestamp == INT64_MIN) return 0.0;
+  int64_t masterPTS = [self.timestampSync translateFrom:timestamp];
+  
+  int64_t expectedPacketDuration = (PTS - self.lastPacketPTS) / self.playbackRate;
+  int64_t expectedPTS = self.lastPacketPTS + expectedPacketDuration;
+  
+  int64_t deltaPTS = expectedPTS - masterPTS;
+  NSTimeInterval delta = (double)deltaPTS / self.timebase;
+  
+  return delta;
+}
+
+- (void)dropFrame:(DecodedFrame*)frame withDelta:(NSTimeInterval)delta {
+  // Frame is too far behind clock source - drop the frame.
+  
+  //NLLogNVPInfo(@"Frame (%lld) too far behind clock source.  Dropping w/ delta %fs.", frame.PTS, delta);
+  self.consecutiveDroppedFrames++;
+  self.totalDroppedFrames++;
+  
+  if (self.consecutiveDroppedFrames > 5) {
+    // Dropping too many frames, restart clock at this frame, and make sure the next frame is presented
+    NLLogNVPWarn(@"Forcing next frame presentation");
+    self.lastPacketPTS = frame.PTS;
+    self.lastPacketSystemTime = [NSDate date];
+    self.consecutiveDroppedFrames = 0;
+    self.forcePresent = YES;
+  }
+  
+  if (self.totalDroppedFrames % 30 == 0) {
+    NLLogNVPWarn(@"Video player dropped %d frames so far", self.totalDroppedFrames);
+  }
+}
+
+- (void)presentFrame:(DecodedFrame *)frame withDelta:(NSTimeInterval)delta {
+  
+#ifdef LOG_TIMING
+  NLLogNVPInfo(@"Packet PTS: %lld, waiting: %fs (# packets buffered: %d)", frame.PTS, delta, [self.jitterBuffer numPackets]);
+#endif
+  
+  NSDate *presentationTime = [NSDate dateWithTimeIntervalSinceNow:delta];
+  
+  if (delta <= 0.0) {
+    // Just show the frame
+    [self.delegate presentFrame:frame];
+  } else if (delta < 5.0) {
+    if ([self waitForPresentationTime:presentationTime]) {
+      [self.delegate presentFrame:frame];
+    }
+  } else {
+    NLLogNVPWarn(@"Received frame with bizarro timing (%fs in the future).  Dropping frame.", delta);
+    return;
+  }
+  
+  self.consecutiveDroppedFrames = 0;
+  self.lastPacketPTS = frame.PTS;
+  self.lastPacketSystemTime = presentationTime;
+  self.forcePresent = NO;
+}
+
+// VideoDecoderDelegate
+
+- (void)onFrameDecoded:(DecodedFrame *)frame {
+  if (self.master == nil) {
+    self.playbackRate = [self.jitterBuffer playSpeedForPTS:frame.PTS];
+  }
+  
+  NSTimeInterval delta = [self deltaForPTS:frame.PTS];
+  if (delta < self.dropThreshold && !self.forcePresent) {
+    // Drop this frame
+    [self dropFrame:frame withDelta:delta];
+    return;
+  }
+  
+  if (frame.PTS < self.lastPacketPTS) {
+    // Don't play backwards in time
+    [self dropFrame:frame withDelta:delta];
+    return;
+  }
+  
+  [self presentFrame:frame withDelta:delta];
+}
+
+- (void)videoPlaybackThreadEntry {
+  NSThread *currentThread = [NSThread currentThread];
+  
+  [currentThread setName:@"Video decode/playback"];
+  
+  // All decoding and presentation happens in this thread so bump the priority
+  // so that any other CPU activity doesn't cause video stutter.
+  [currentThread setThreadPriority:1.0];
+  
+  BOOL detectedBframes = NO;
+  
+  while (![currentThread isCancelled]) {
+    @autoreleasepool {
+      int64_t PTS = [self.jitterBuffer peekPTS];
+      if (PTS == INT64_MIN) {
+        // No packet available
+        [self.jitterBuffer waitForPacket];
+        continue;
+      }
+      
+      Packet *packet = [self.jitterBuffer nextPacket];
+      if (self.lastPacketSystemTime == nil) {
+        // First packet
+        self.lastPacketPTS = packet.PTS;
+        self.lastPacketSystemTime = [NSDate date];
+      }
+      
+      // Check if this is a B-frame
+      if (!detectedBframes) {
+        H264SliceType sliceType = getH264SliceType(packet.data);
+        if (sliceType == SLICE_B_ONLY || sliceType == SLICE_B) {
+          // Detected a B-frame, set the property
+          detectedBframes = YES;
+          self.dropThreshold = FRAME_DROP_THRESHOLD_BFRAMES;
+          NLLogNVPInfo(@"Setting drop threshold to %f", self.dropThreshold);
+        }
+      }
+      
+      [self.decoder decodeFrame:packet.data PTS:packet.PTS];
+    }
+  }
+  
+  [self.shutdownConditionLock lock];
+  [self.shutdownConditionLock unlockWithCondition:SHUTDOWN_CONDITION_FINISHED];
+  NLLogNVPInfo(@"Video decode thread shutting down");
+}
+
+- (int64_t)currentTimestamp {
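+  // Slave to the master clock when one is set; otherwise extrapolate from
+  // the last presented packet's PTS using elapsed wall-clock time.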
+  if (self.master != nil) {
+    return self.master.currentTimestamp;
+  }
+  else {
+    if (self.lastPacketSystemTime == nil) return INT64_MIN;
+    
+    NSTimeInterval secs = [[NSDate date] timeIntervalSinceDate:self.lastPacketSystemTime];
+    int64_t ts = self.lastPacketPTS + (secs * self.timebase);
+    return ts;
+  }
+}
+
+@end
diff --git a/NexusVideoPlayer/jitter/AdaptiveJitterBuffer.cpp b/NexusVideoPlayer/jitter/AdaptiveJitterBuffer.cpp
new file mode 100644
index 0000000..d60928f
--- /dev/null
+++ b/NexusVideoPlayer/jitter/AdaptiveJitterBuffer.cpp
@@ -0,0 +1,255 @@
+//
+//  AdaptiveJitterBuffer.cpp
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/15/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#include <cstdlib>
+#include <cassert>
+#include <limits>
+#include <sys/time.h>
+
+#include "AdaptiveJitterBuffer.hpp"
+#include "JitterTunables.h"
+
+static int sign(int64_t input) {
+  if (input < 0) return -1;
+  else if (input > 0) return 1;
+  return 0;
+}
+
+static int64_t tv_usecs(const struct timeval &tv) {
+  return tv.tv_sec * (1000*1000) + tv.tv_usec;
+}
+
+// Round to nearest multiple (halfway rounds away from 0)
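+// e.g. ajb_round_to_multiple(6, 4) == 8 and ajb_round_to_multiple(-6, 4) == -8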
+int64_t ajb_round_to_multiple(int64_t num, int64_t factor) {
+  assert(factor > 0);
+  int64_t pos_num = llabs(num);
+  int64_t rem = pos_num % factor;
+  if (rem < factor / 2) {
+    return sign(num) * (pos_num - rem);
+  }
+  else {
+    return sign(num) * (pos_num + (factor - rem));
+  }
+}
+
+AdaptiveJitterBuffer::AdaptiveJitterBuffer(int timebase, bool is_live, bool is_wifi)
+: lossFactor((is_live) ? AJB_LOSS_FACTOR_LIVE : AJB_LOSS_FACTOR_RECORDED),
+  delayCalculator(timebase * AJB_MAX_TIMING_AGE, timebase * lossFactor),
+  slaveDelayCalculator(timebase * AJB_MAX_TIMING_AGE, timebase * lossFactor),
+
+  timebase(timebase),
+  isLive(is_live),
+  autoAdjust(isLive),
+
+  bufferBackPTS(std::numeric_limits<int64_t>::min()),
+  bufferBackAddedTime(std::numeric_limits<int64_t>::min()),
+  targetDelay(0), // set below
+  delayBias(0),
+
+  optimizeDelayCounter(0)
+{
+  // Anchor the wall-clock epoch that packet time_added values are measured
+  // against (see addPacket / addSlaveTiming).
+  gettimeofday(&bufferStartedTime, NULL);
+  
+  if (is_live) {
+    if (is_wifi) {
+      targetDelay = timebase * AJB_INITIAL_DELAY_LIVE_WIFI;
+    }
+    else {
+      targetDelay = timebase * AJB_INITIAL_DELAY_LIVE_NONWIFI;
+    }
+  }
+  else {
+    targetDelay = timebase * AJB_INITIAL_DELAY_RECORDED;
+  }
+}
+
+#ifdef DEBUG
+AdaptiveJitterBuffer::AdaptiveJitterBuffer(int timebase, int loss_factor)
+: lossFactor(loss_factor / (float)timebase),
+  delayCalculator(timebase * AJB_MAX_TIMING_AGE, loss_factor),
+  slaveDelayCalculator(timebase * AJB_MAX_TIMING_AGE, loss_factor),
+
+  timebase(timebase),
+  isLive(true),
+  autoAdjust(false),
+
+  bufferBackPTS(std::numeric_limits<int64_t>::min()),
+  bufferBackAddedTime(std::numeric_limits<int64_t>::min()),
+  targetDelay(0),
+  delayBias(0),
+
+  optimizeDelayCounter(0)
+{
+  // This constructor is only compiled under DEBUG (see the #ifdef above), so
+  // no runtime guard against production use is needed in the body.
+  gettimeofday(&bufferStartedTime, NULL);
+}
+#endif
+
+void AdaptiveJitterBuffer::checkAutoAdjustDelay()
+{
+  if (!autoAdjust) return;
+  
+  // Automatically adjust delay
+  if (optimizeDelayCounter > AJB_OPTIMIZE_DELAY_INTERVAL_PACKETS) {
+    optimizeDelay();
+  }
+  else {
+    optimizeDelayCounter++;
+  }
+}
+
+int AdaptiveJitterBuffer::computePacketDelay(int64_t pts, int64_t time_added)
+{
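+  // "Delay" here is how much more wall-clock time than media time elapsed
+  // since the previous packet, i.e. how late this packet is relative to a
+  // perfectly paced stream (both measured in timestamp units).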
+  int64_t pts_diff, time_diff;
+  if (buffer.size() > 0) {
+    pts_diff = pts - buffer.back().PTS;
+    time_diff = time_added - buffer.back().time_added;
+  }
+  else {
+    if (bufferBackPTS == std::numeric_limits<int64_t>::min() && bufferBackAddedTime == std::numeric_limits<int64_t>::min()) {
+      // First packet to be added
+      return 0;
+    }
+    
+    pts_diff = pts - bufferBackPTS;
+    time_diff = time_added - bufferBackAddedTime;
+  }
+  
+  // TODO: is there a better way to estimate packet lateness?
+  int delay = static_cast<int>(time_diff - pts_diff);
+  // Negative delays don't mean anything in this model
+  if (delay < 0) delay = 0;
+  
+  return delay;
+}
+
+////////////////////////////////////////
+// Public methods
+////////////////////////////////////////
+
+int64_t AdaptiveJitterBuffer::getBufferLength() const {
+  if (buffer.size() == 0) return 0;
+  return buffer.back().PTS + buffer.back().span - buffer.front().PTS;
+}
+
+void AdaptiveJitterBuffer::optimizeDelay()
+{
+  if (delayCalculator.numTimingsValidFor(bufferBackPTS) < AJB_MIN_TIMING_SAMPLES) {
+    printf("Not enough samples to optimize delay\n");
+    optimizeDelayCounter = 0;
+    return;
+  }
+  
+  auto optimal = delayCalculator.getOptimalDelay(AJB_MAX_PACKET_LOSS);
+  if (isLive && optimal > AJB_LIVE_MAX_BUFFER_SIZE * timebase) {
+    // TODO: cannot achieve both max packet loss and max buffer size in live!
+    //panic = true;
+  }
+  else {
+    //panic = false;
+  }
+  
+  auto slaveOptimal = slaveDelayCalculator.getOptimalDelay(AJB_MAX_PACKET_LOSS);
+  if (slaveDelayCalculator.numTimingsValidFor(bufferBackPTS) >= AJB_MIN_TIMING_SAMPLES) {
+    printf("optimal = %lld, slaveOptimal = %lld\n", optimal, slaveOptimal);
+    optimal = std::max(optimal, slaveOptimal);
+  }
+  
+  // target delay should be at least delayBias
+  targetDelay = std::max(delayBias, optimal);
+  optimizeDelayCounter = 0;
+  
+  delayBias -= (timebase / 10); // ~20s to reduce delayBias by 1s
+  if (delayBias < 0) delayBias = 0;
+}
+
+void AdaptiveJitterBuffer::addPacket(JBPacket packet, int64_t *time_added)
+{
+  if (time_added == NULL) {
+    struct timeval now;
+    gettimeofday(&now, NULL);
+    int64_t useconds_since_start = tv_usecs(now) - tv_usecs(bufferStartedTime);
+    packet.time_added = (useconds_since_start * timebase) / 1000000;
+  }
+  else {
+    packet.time_added = *time_added;
+  }
+  
+  int delay = computePacketDelay(packet.PTS, packet.time_added);
+  
+  // Update timings
+  delayCalculator.addTiming(packet.PTS, delay);
+  
+  auto it = buffer.insert(packet, HINT_BACK);
+  if (it == buffer.end()-1) {
+    // This is the new back element
+    bufferBackPTS = packet.PTS;
+    bufferBackAddedTime = packet.time_added;
+  }
+  
+#ifdef DEBUG
+  assert(buffer.debugIsSorted());
+#endif
+}
+
+void AdaptiveJitterBuffer::addSlaveTiming(int64_t equiv_pts, int64_t *time_added)
+{
+  if (time_added == NULL) {
+    struct timeval now;
+    gettimeofday(&now, NULL);
+    int64_t useconds_since_start = tv_usecs(now) - tv_usecs(bufferStartedTime);
+    int64_t pkt_time_added = (useconds_since_start * timebase) / 1000000;
+  
+    slaveDelayCalculator.addTiming(equiv_pts, computePacketDelay(equiv_pts, pkt_time_added));
+  }
+  else {
+    slaveDelayCalculator.addTiming(equiv_pts, computePacketDelay(equiv_pts, *time_added));
+  }
+}
+
+int64_t AdaptiveJitterBuffer::peekPTS() const {
+  if (buffer.size() == 0) {
+    return std::numeric_limits<int64_t>::min();
+  }
+  return buffer.front().PTS;
+}
+
+bool AdaptiveJitterBuffer::nextPacket(JBPacket &out_packet)
+{
+  // Auto adjustment
+  checkAutoAdjustDelay();
+  
+  // Let the consumer worry about timing of scheduled packets
+  if (buffer.size() > 0) {
+    out_packet = buffer.front();
+    buffer.pop_front();
+    return true;
+  }
+  else {
+    return false;
+  }
+}
+
+float AdaptiveJitterBuffer::getPlaySpeedForPTS(int64_t pts)
+{
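+  // Control law under the current tunables: speed = 1 + 0.4*x + 0.15*x^3,
+  // where x is seconds of buffered media beyond the target delay; the result
+  // is clamped to [AJB_MIN_PLAY_SPEED, AJB_MAX_PLAY_SPEED].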
+  int64_t playbackTimestamp = bufferBackPTS - targetDelay;
+  float extra_buf_amt = (playbackTimestamp - pts) / (float)timebase;
+  float speed = AJB_PLAY_SPEED_FACTOR_CUBE * (extra_buf_amt * extra_buf_amt * extra_buf_amt);
+  speed += extra_buf_amt * AJB_PLAY_SPEED_FACTOR_LIN;
+  speed += 1.0f;
+  if (speed > AJB_MAX_PLAY_SPEED) speed = AJB_MAX_PLAY_SPEED;
+  if (speed < AJB_MIN_PLAY_SPEED) speed = AJB_MIN_PLAY_SPEED;
+  
+  if (!isLive && speed > 1.0f) {
+    // Don't try to play fast if playing CVR
+    speed = 1.0f;
+  }
+  
+  return speed;
+}
\ No newline at end of file
diff --git a/NexusVideoPlayer/jitter/AdaptiveJitterBuffer.hpp b/NexusVideoPlayer/jitter/AdaptiveJitterBuffer.hpp
new file mode 100644
index 0000000..4252680
--- /dev/null
+++ b/NexusVideoPlayer/jitter/AdaptiveJitterBuffer.hpp
@@ -0,0 +1,109 @@
+//
+//  AdaptiveJitterBuffer.hpp
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/15/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#ifndef __NexusVideoPlayer__AdaptiveJitterBuffer__
+#define __NexusVideoPlayer__AdaptiveJitterBuffer__
+
+#include <sys/time.h>
+#include <memory>
+
+#include "DelayCalculator.hpp"
+#include "SortedDeque.hpp"
+
+/* AdaptiveJitterBuffer
+ *
+ * The jitter buffer maintains a correctly ordered list of packets
+ * (lowest sequence number first) and some timing information. The jitter buffer
+ * performs three basic operations:
+ *
+ * 1. Adding packets to the buffer. This places the packet at the correct position
+ *    in the buffer by sequence number
+ * 2. Playing the next packet. The buffer returns the first (lowest sequence number)
+ *    packet in the buffer
+ * 3. Playback timing. The buffer can be queried for where the playhead should be
+ *    right now (playbackTimestamp), which is a function of the amount of
+ *    delay/buffering that the buffer introduces (configurable)
+ *
+ * The jitter buffer can optionally do automatic adjustment of the amount of delay
+ * based on the jitter it detects in incoming packets.
+ */
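+
+/* A minimal usage sketch (illustrative only; the timebase, payload handling
+ * and threading shown here are assumptions, not part of this contract):
+ *
+ *   AdaptiveJitterBuffer jb(1000, true, true);  // 1 kHz timebase, live, wifi
+ *
+ *   JBPacket in = {};
+ *   in.seq_num = seq; in.PTS = pts; in.span = span; in.payload_data = data;
+ *   jb.addPacket(in);                           // buffer fills in time_added
+ *
+ *   JBPacket out;
+ *   while (jb.peekPTS() != INT64_MIN && jb.nextPacket(out)) {
+ *     float speed = jb.getPlaySpeedForPTS(out.PTS);
+ *     // decode/present out.payload_data, pacing playback by `speed`
+ *   }
+ */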
+
+int64_t ajb_round_to_multiple(int64_t num, int64_t factor);
+
+struct JBPacket {
+  int64_t seq_num;
+  int64_t PTS;
+  int span;
+  void *payload_data;
+  
+  /// Jitter buffer fills this out. In TS units
+  int64_t time_added;
+  
+  inline bool operator <(const JBPacket &other) const {
+    return this->seq_num < other.seq_num;
+  }
+};
+
+class AdaptiveJitterBuffer {
+  float lossFactor;
+  DelayCalculator delayCalculator;
+  DelayCalculator slaveDelayCalculator;
+  SortedDeque<JBPacket> buffer;
+  
+  int timebase;
+  bool isLive;
+  bool autoAdjust;
+  
+  /// Timestamp of the last packet (highest seq num) in the buffer
+  int64_t bufferBackPTS;
+  /// Time when the last packet (highest seq num) was added
+  int64_t bufferBackAddedTime;
+  /// How much delay/buffering we want to have
+  int64_t targetDelay;
+  /// Leftover bias (increase in target delay) from last time we buffered
+  int64_t delayBias;
+  
+  struct timeval bufferStartedTime;
+  int optimizeDelayCounter;
+  
+  void checkAutoAdjustDelay();
+  int computePacketDelay(int64_t pts, int64_t time_added);
+public:
+  /** Sets some sensible defaults.
+      
+      @param timebase how many timestamp units happen per second
+      @param is_live do we intend to play live video?
+      @param is_wifi are we on a good connection?
+   */
+  AdaptiveJitterBuffer(int timebase, bool is_live, bool is_wifi);
+  
+  DelayCalculator &getSlaveDelayCalculator() { return slaveDelayCalculator; }
+  
+  void setAutoAdjust(bool adjust) { autoAdjust = adjust; }
+  int64_t getTargetDelay() const { return targetDelay; }
+  void setTargetDelay(int64_t new_delay) { targetDelay = new_delay; }
+  void addBias(int64_t new_bias) { delayBias += new_bias; }
+  
+  size_t getNumPackets() const { return buffer.size(); }
+  int64_t getBufferLength() const;
+  bool getFullyBuffered() const { return getBufferLength() >= getTargetDelay(); }
+  
+  void addPacket(JBPacket packet, int64_t *time_added = NULL);
+  void addSlaveTiming(int64_t equiv_pts, int64_t *time_added = NULL);
+  int64_t peekPTS() const;
+  bool nextPacket(JBPacket &out_packet);
+  float getPlaySpeedForPTS(int64_t pts);
+  
+  // Internal, don't use in production
+  void optimizeDelay();
+#ifdef DEBUG
+  AdaptiveJitterBuffer(int timebase, int loss_factor);
+#endif
+};
+
+#endif /* defined(__NexusVideoPlayer__AdaptiveJitterBuffer__) */
diff --git a/NexusVideoPlayer/jitter/DelayCalculator.cpp b/NexusVideoPlayer/jitter/DelayCalculator.cpp
new file mode 100644
index 0000000..73dda01
--- /dev/null
+++ b/NexusVideoPlayer/jitter/DelayCalculator.cpp
@@ -0,0 +1,91 @@
+//
+//  DelayCalculator.cpp
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/14/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#include "DelayCalculator.hpp"
+
+#include <cassert>
+#include <vector>
+#include <algorithm>
+#include <limits>
+
+//#define DEBUG_PRINT_TIMING_COSTS
+
+DelayCalculator::DelayCalculator(int max_timestamp_span, int delay_for_1p_loss)
+: maxTimestampSpan(max_timestamp_span), delayLossFactor(delay_for_1p_loss)
+{}
+
+int DelayCalculator::timestampSpan() const {
+  return timings.back().packet_ts - timings.front().packet_ts;
+}
+
+void DelayCalculator::addTiming(int64_t packet_ts, int packet_delay) {
+  TimingInfo ti = {};
+  ti.packet_ts = packet_ts;
+  ti.packet_delay = packet_delay;
+  timings.insert(ti, HINT_BACK);
+  
+  while (timestampSpan() >= maxTimestampSpan) {
+    timings.pop_front();
+  }
+  
+//  printf("Added timing: PTS %lld, delay %d\n", ti.packet_ts, ti.packet_delay);
+  
+#ifdef DEBUG
+  assert(timings.debugIsSorted());
+#endif
+}
+
+bool _timingInfoDelayComp(const TimingInfo &t1, const TimingInfo &t2) {
+  return t1.packet_delay > t2.packet_delay;
+}
+
+size_t DelayCalculator::numTimingsValidFor(int64_t ts) const {
+  size_t count = 0;
+  for (auto it = timings.rbegin(); it != timings.rend(); it++) {
+    if ((*it).packet_ts >= (ts - maxTimestampSpan)) {
+      count++;
+    }
+    else {
+      break;
+    }
+  }
+  return count;
+}
+
+int64_t DelayCalculator::getOptimalDelay(float max_packet_loss) const {
+  // If there are no timings, nothing interesting we can report
+  if (timings.size() == 0) return 0;
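+  
+  // Cost model: a candidate delay costs itself plus delayLossFactor timestamp
+  // units per percentage point of packets that would still arrive too late at
+  // that delay; the candidate with the lowest total cost wins.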
+  
+  // Sort by size of delay (largest first)
+  std::vector<TimingInfo> sorted_timings(timings.begin(), timings.end());
+  std::sort(sorted_timings.begin(), sorted_timings.end(), _timingInfoDelayComp);
+  
+  // Find the best delay
+  int best_cost = std::numeric_limits<int>::max();
+  int64_t best_delay = std::numeric_limits<int64_t>::max();
+  for (size_t i = 0; i < sorted_timings.size(); i++) {
+    const TimingInfo &ti = sorted_timings[i];
+    
+    // i is the number of packets lost if we pick this delay
+    float ratio_packets_lost = ((float)i / sorted_timings.size());
+    if (ratio_packets_lost > max_packet_loss) break;
+    
+    float pc_packets_lost = ratio_packets_lost * 100.0f;
+    int this_cost = ti.packet_delay + (pc_packets_lost * delayLossFactor);
+    if (this_cost <= best_cost) {
+      best_cost = this_cost;
+      best_delay = ti.packet_delay;
+    }
+    
+#ifdef DEBUG_PRINT_TIMING_COSTS
+    printf("Cost of using delay %d: %d\n", ti.packet_delay, this_cost);
+#endif
+  }
+  
+  return best_delay;
+}
\ No newline at end of file
diff --git a/NexusVideoPlayer/jitter/DelayCalculator.hpp b/NexusVideoPlayer/jitter/DelayCalculator.hpp
new file mode 100644
index 0000000..37b70a1
--- /dev/null
+++ b/NexusVideoPlayer/jitter/DelayCalculator.hpp
@@ -0,0 +1,48 @@
+//
+//  DelayCalculator.hpp
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/14/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#ifndef __NexusVideoPlayer__DelayCalculator__
+#define __NexusVideoPlayer__DelayCalculator__
+
+#include <stdint.h>
+
+#include "SortedDeque.hpp"
+
+struct TimingInfo {
+  int64_t packet_ts;
+  int packet_delay;
+  
+  inline bool operator <(const TimingInfo &other) const {
+    return this->packet_ts < other.packet_ts;
+  }
+};
+
+class DelayCalculator
+{
+  int maxTimestampSpan;
+  int delayLossFactor;
+  SortedDeque<TimingInfo> timings;
+  
+  int timestampSpan() const;
+  
+public:
+  DelayCalculator(int max_timestamp_span, int delay_for_1p_loss);
+  
+  void setDelayLossFactor(int new_factor) { delayLossFactor = new_factor; }
+  
+  void addTiming(int64_t packet_ts, int packet_delay);
+  size_t numTimingsValidFor(int64_t ts) const;
+  /**
+   @return INT64_MAX on error, otherwise optimal delay in timestamp units
+   @param max_packet_loss ratio out of 1.0. Don't consider scenarios with
+          higher packet loss than this.
+   */
+  int64_t getOptimalDelay(float max_packet_loss) const;
+};
+
+#endif /* defined(__NexusVideoPlayer__DelayCalculator__) */
diff --git a/NexusVideoPlayer/jitter/JitterTunables.h b/NexusVideoPlayer/jitter/JitterTunables.h
new file mode 100644
index 0000000..713eec5
--- /dev/null
+++ b/NexusVideoPlayer/jitter/JitterTunables.h
@@ -0,0 +1,49 @@
+//
+//  JitterTunables.h
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/25/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#ifndef NexusVideoPlayer_JitterTunables_h
+#define NexusVideoPlayer_JitterTunables_h
+
+// ALL TIME UNITS IN SECONDS
+
+// TODO: the jitter buffer panics when both of these conditions are violated
+// During live playback, buffer can be at most this long
+#define AJB_LIVE_MAX_BUFFER_SIZE 5.0f
+// Out of 1.0
+#define AJB_MAX_PACKET_LOSS 0.15f
+
+// Loss factor (seconds of delay per 1% loss) during live playback
+#define AJB_LOSS_FACTOR_LIVE 0.2f
+// Loss factor during recorded playback
+#define AJB_LOSS_FACTOR_RECORDED 10.0f
+
+// Initial delay ("buffer size") for live playback
+#define AJB_INITIAL_DELAY_LIVE_WIFI 0.2f
+#define AJB_INITIAL_DELAY_LIVE_NONWIFI 0.2f
+#define AJB_INITIAL_DELAY_RECORDED 5.0f
+
+// How many seconds to keep timing samples for
+#define AJB_MAX_TIMING_AGE 10.0f
+
+// How many timing samples we need to compute delay
+#define AJB_MIN_TIMING_SAMPLES 10
+
+// After how many packets should the jitter buffer re-optimize delay
+#define AJB_OPTIMIZE_DELAY_INTERVAL_PACKETS 25
+
+// How to calculate desired play speed from buffer size discrepancy
+#define AJB_MIN_PLAY_SPEED 0.8f
+#define AJB_MAX_PLAY_SPEED 1.75f
+#define AJB_PLAY_SPEED_FACTOR_CUBE 0.15f
+#define AJB_PLAY_SPEED_FACTOR_LIN 0.4f
+
+// For Speex: min/max play speeds when not silent
+#define AJB_ACTIVITY_MIN_PLAY_SPEED 0.95f
+#define AJB_ACTIVITY_MAX_PLAY_SPEED 1.10f
+
+#endif
diff --git a/NexusVideoPlayer/jitter/SkipInterpCounter.cpp b/NexusVideoPlayer/jitter/SkipInterpCounter.cpp
new file mode 100644
index 0000000..a1a22d3
--- /dev/null
+++ b/NexusVideoPlayer/jitter/SkipInterpCounter.cpp
@@ -0,0 +1,57 @@
+//
+//  SkipInterpCounter.cpp
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/16/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#include <cassert>
+#include <cmath>
+#include <cstdio>
+
+#include "SkipInterpCounter.hpp"
+
+SkipInterpCounter::SkipInterpCounter()
+: currentMode(PLAY_MODE_NORMAL), currentSpeed(1.0f), accumulator(0.0f), delta(0.0f)
+{
+}
+
+void SkipInterpCounter::setPlaybackSpeed(float speed) {
+  if (speed >= 1.01f) {
+    currentMode = PLAY_MODE_SKIP;
+    if (speed != currentSpeed) {
+      delta = 1.0f - 1.0f/speed;
+    }
+  }
+  else if (speed <= 0.99f) {
+    currentMode = PLAY_MODE_INTERP;
+    if (speed != currentSpeed) {
+      delta = 1.0f - speed;
+    }
+  }
+  else {
+    currentMode = PLAY_MODE_NORMAL;
+    delta = 0.0f;
+  }
+  
+  currentSpeed = speed;
+}
+
+void SkipInterpCounter::tick() {
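+  // delta is the fraction of packets to act on; the accumulator crosses 1.0
+  // about once every 1/delta ticks, and the random term jitters those events
+  // so they don't land on a fixed period.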
+  if (accumulator >= 1.0f) {
+    accumulator -= 1.0f;
+  }
+  accumulator += delta;
+  
+  float noise = ((float)rand() / RAND_MAX) - 0.5f;
+  accumulator += noise * delta;
+}
+
+bool SkipInterpCounter::shouldSkip() const {
+  return currentMode == PLAY_MODE_SKIP && accumulator >= 1.0f;
+}
+
+bool SkipInterpCounter::shouldInterp() const {
+  return currentMode == PLAY_MODE_INTERP && accumulator >= 1.0f;
+}
\ No newline at end of file
diff --git a/NexusVideoPlayer/jitter/SkipInterpCounter.hpp b/NexusVideoPlayer/jitter/SkipInterpCounter.hpp
new file mode 100644
index 0000000..9df4241
--- /dev/null
+++ b/NexusVideoPlayer/jitter/SkipInterpCounter.hpp
@@ -0,0 +1,41 @@
+//
+//  SkipInterpCounter.hpp
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 12/16/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#ifndef __NexusVideoPlayer__SkipInterpCounter__
+#define __NexusVideoPlayer__SkipInterpCounter__
+
+#include <cstdlib>
+
+/** A counter that decides when to skip or interpolate packets based on
+  * a given playback speed. Useful for adjusting audio playback speed
+  * without a complex algorithm.
+  */
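+
+/* Assumed call pattern (a sketch inferred from the interface, not taken from
+ * the player code):
+ *
+ *   SkipInterpCounter counter;
+ *   counter.setPlaybackSpeed(1.25f); // ~25% fast => skip about 1 packet in 5
+ *   // then, per audio packet:
+ *   counter.tick();
+ *   if (counter.shouldSkip())   ... drop this packet ...
+ *   if (counter.shouldInterp()) ... play it twice or time-stretch it ...
+ */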
+class SkipInterpCounter {
+  enum PlayMode {
+    PLAY_MODE_NORMAL,
+    PLAY_MODE_SKIP,
+    PLAY_MODE_INTERP
+  };
+  
+  PlayMode currentMode;
+  float currentSpeed;
+  float accumulator;
+  float delta;
+  
+public:
+  SkipInterpCounter();
+  
+  float getPlaybackSpeed() const { return currentSpeed; }
+  void setPlaybackSpeed(float speed);
+  void tick();
+  
+  bool shouldSkip() const;
+  bool shouldInterp() const;
+};
+
+#endif /* defined(__NexusVideoPlayer__SkipInterpCounter__) */
diff --git a/NexusVideoPlayer/jitter/SortedDeque.hpp b/NexusVideoPlayer/jitter/SortedDeque.hpp
new file mode 100644
index 0000000..45c5446
--- /dev/null
+++ b/NexusVideoPlayer/jitter/SortedDeque.hpp
@@ -0,0 +1,125 @@
+//
+//  SortedDeque.hpp
+//  NexusVideoPlayer
+//
+//  Created by Frank Huang on 11/20/13.
+//  Copyright (c) 2013 Dropcam. All rights reserved.
+//
+
+#ifndef __NexusVideoPlayer__SortedDeque__
+#define __NexusVideoPlayer__SortedDeque__
+
+#include <deque>
+
+enum PositionHint {
+  HINT_NONE,
+  HINT_FRONT,
+  HINT_BACK
+};
+
+// A deque that keeps itself sorted
+// operator< must be well-defined for comparing two values of type T
+// When two items compare equal, the new item is inserted after the old one
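+// e.g. inserting 5, 3, 9 and then a second 5 yields [3, 5, 5(new), 9],
+// regardless of which position hints were supplied.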
+template <class T>
+class SortedDeque
+{
+public:
+  typedef typename std::deque<T>::iterator iterator;
+  typedef typename std::deque<T>::const_iterator const_iterator;
+  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
+  typedef T &reference;
+  typedef const T &const_reference;
+  
+private:
+  std::deque<T> buffer;
+  
+  iterator insert_back(const T &value) {
+    iterator out_it = buffer.end();
+    bool inserted = false;
+    
+    for (auto it = buffer.rbegin(); it != buffer.rend(); it++) {
+      if (!(value < (*it))) { // if ((*it) <= value)
+        out_it = buffer.insert(it.base(), value);
+        inserted = true;
+        break;
+      }
+    }
+    
+    if (!inserted) {
+      buffer.push_front(value);
+      out_it = buffer.begin();
+    }
+    
+    return out_it;
+  }
+  
+  iterator insert_front(const T &value) {
+    iterator out_it = buffer.end();
+    bool inserted = false;
+    
+    for (auto it = buffer.begin(); it != buffer.end(); it++) {
+      if (value < (*it)) {
+        out_it = buffer.insert(it, value);
+        inserted = true;
+        break;
+      }
+    }
+    
+    if (!inserted) {
+      buffer.push_back(value);
+      out_it = buffer.end()-1;
+    }
+    
+    return out_it;
+  }
+  
+public:
+  SortedDeque() {}
+  
+  /// @param hint if you suspect the new item should go in the front or back, pass a hint to speed up insertion
+  iterator insert(const T &value, PositionHint hint = HINT_NONE) {
+    switch (hint) {
+    case HINT_NONE:
+    case HINT_BACK:
+      return insert_back(value);
+      break;
+      
+    case HINT_FRONT:
+    default:
+      return insert_front(value);
+      break;
+    }
+  }
+  
+  iterator begin() { return buffer.begin(); }
+  const_iterator begin() const { return buffer.begin(); }
+  iterator end() { return buffer.end(); }
+  const_iterator end() const { return buffer.end(); }
+  
+  const_reverse_iterator rbegin() const { return buffer.rbegin(); }
+  const_reverse_iterator rend() const { return buffer.rend(); }
+  
+  size_t size() const { return buffer.size(); }
+  
+  reference front() { return buffer.front(); }
+  const_reference front() const { return buffer.front(); }
+  void pop_front() { buffer.pop_front(); }
+  reference back() { return buffer.back(); }
+  const_reference back() const { return buffer.back(); }
+  iterator erase(const_iterator position) { return buffer.erase(position); }
+  
+#ifdef DEBUG
+  /// Debug method to check if deque is still sorted
+  bool debugIsSorted() {
+    if (buffer.size() == 0) return true;
+    for (auto it = buffer.begin()+1; it != buffer.end(); it++) {
+      auto prev = *(it-1);
+      auto curr = *it;
+      if (curr < prev) return false;
+    }
+    return true;
+  }
+#endif
+};
+
+#endif /* defined(__NexusVideoPlayer__SortedDeque__) */