Cleanups in the video encoding/decoding code. Re-enable VP8.

1. Moved video-related protobuf messages from event.proto to video.proto and removed those we no longer need (the new packet contract is sketched just before the diff).
2. Fixed naming of enums and some types.
3. Re-enabled VP8.
4. Added a proper RGB-to-YUV converter for the VP8 encoder (sketched right after this list).
5. Changed capturer_fake to show a more meaningful picture.
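
For reference, the conversion added to EncoderVp8::PrepareImage uses the usual
BT.601 integer approximation with 2x2 chroma subsampling. Below is a minimal
standalone sketch of the same arithmetic; the function name and the assumption
that the input channels are ordered R, G, B are illustrative, and the sketch
ignores the separate output stride that the real code takes from the vpx image.

    #include <algorithm>

    #include "base/basictypes.h"  // for uint8

    namespace {

    // Clamps an intermediate value to the valid [0, 255] byte range.
    int ClipByte(int x) {
      return std::min(255, std::max(0, x));
    }

    }  // namespace

    // Converts an RGB32 image (4 bytes per pixel) into a planar YUV buffer
    // laid out as a full-resolution Y plane followed by quarter-resolution
    // U and V planes. |width| and |height| are assumed to be even.
    void ConvertRGB32ToYUV(const uint8* rgb, int rgb_stride,
                           uint8* yuv, int width, int height) {
      uint8* y_plane = yuv;
      uint8* u_plane = yuv + width * height;
      uint8* v_plane = u_plane + width * height / 4;
      for (int i = 0; i < height; ++i) {
        const uint8* pixel = rgb + i * rgb_stride;
        uint8* y_row = y_plane + i * width;
        uint8* u_row = u_plane + (i / 2) * (width / 2);
        uint8* v_row = v_plane + (i / 2) * (width / 2);
        for (int j = 0; j < width; ++j, pixel += 4) {
          y_row[j] = ClipByte(((pixel[0] * 66 + pixel[1] * 129 +
                                pixel[2] * 25 + 128) >> 8) + 16);
          // Chroma is sampled once per 2x2 block of pixels.
          if (i % 2 == 0 && j % 2 == 0) {
            u_row[j / 2] = ClipByte(((pixel[0] * -38 + pixel[1] * -74 +
                                      pixel[2] * 112 + 128) >> 8) + 128);
            v_row[j / 2] = ClipByte(((pixel[0] * 112 + pixel[1] * -94 +
                                      pixel[2] * -18 + 128) >> 8) + 128);
          }
        }
      }
    }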

BUG=57374
TEST=unittests

Review URL: http://codereview.chromium.org/4136010

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@64700 0039d316-1c4b-4281-b951-d872f2087c98
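
The new VideoPacket message replaces both RectangleUpdatePacket and the old
begin-rect / rect-data / end-rect stream messages: every packet carries a chunk
of encoded bytes in data(), FIRST_PACKET marks the packet that also carries a
VideoPacketFormat, and LAST_PACKET marks the end of an update. A minimal
receiver-side sketch of that contract, inferred from RectangleUpdateDecoder in
this change (the helper name is illustrative):

    #include "remoting/proto/video.pb.h"

    // Interprets the flags on an incoming VideoPacket. Only the first packet
    // of an update carries the format; the last one completes the frame.
    void HandleVideoPacket(const remoting::VideoPacket& packet) {
      if (packet.flags() & remoting::VideoPacket::FIRST_PACKET) {
        // Pick or (re)initialize a decoder based on
        // packet.format().encoding() here.
      }
      // Feed packet.data() to the decoder, e.g. decoder->DecodeBytes(...).
      if (packet.flags() & remoting::VideoPacket::LAST_PACKET) {
        // The update is complete; reset the decoder and hand the decoded
        // frame to the consumer.
      }
    }
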
diff --git a/remoting/base/capture_data.h b/remoting/base/capture_data.h
index 0d707f9..97631ab 100644
--- a/remoting/base/capture_data.h
+++ b/remoting/base/capture_data.h
@@ -10,7 +10,7 @@
 #include "base/basictypes.h"
 #include "base/ref_counted.h"
 #include "remoting/base/types.h"
-#include "remoting/proto/event.pb.h"
+#include "remoting/proto/video.pb.h"
 
 namespace remoting {
 
diff --git a/remoting/base/codec_test.cc b/remoting/base/codec_test.cc
index 62b98607..dcc165e 100644
--- a/remoting/base/codec_test.cc
+++ b/remoting/base/codec_test.cc
@@ -317,7 +317,7 @@
 scoped_refptr<CaptureData> PrepareEncodeData(PixelFormat format,
                                              uint8** memory) {
   // TODO(hclam): Support also YUV format.
-  CHECK(format == PixelFormatRgb32);
+  CHECK(format == PIXEL_FORMAT_RGB32);
   int size = kWidth * kHeight * kBytesPerPixel;
 
   *memory = new uint8[size];
@@ -360,7 +360,7 @@
 
   uint8* memory;
   scoped_refptr<CaptureData> data =
-      PrepareEncodeData(PixelFormatRgb32, &memory);
+      PrepareEncodeData(PIXEL_FORMAT_RGB32, &memory);
 
   TestEncodingRects(encoder, &tester, data, kTestRects, 1);
   TestEncodingRects(encoder, &tester, data, kTestRects + 1, 1);
@@ -412,7 +412,7 @@
 
   uint8* memory;
   scoped_refptr<CaptureData> data =
-      PrepareEncodeData(PixelFormatRgb32, &memory);
+      PrepareEncodeData(PIXEL_FORMAT_RGB32, &memory);
   DecoderTester decoder_tester(decoder);
   decoder_tester.set_strict(strict);
   decoder_tester.set_capture_data(data);
diff --git a/remoting/base/decoder.h b/remoting/base/decoder.h
index 0a6ceff0..82211a7f 100644
--- a/remoting/base/decoder.h
+++ b/remoting/base/decoder.h
@@ -9,7 +9,7 @@
 #include "base/scoped_ptr.h"
 #include "gfx/rect.h"
 #include "media/base/video_frame.h"
-#include "remoting/proto/event.pb.h"
+#include "remoting/proto/video.pb.h"
 
 namespace remoting {
 
@@ -51,7 +51,7 @@
   // Returns true if decoder is ready to accept data via ProcessRectangleData.
   virtual bool IsReadyForData() = 0;
 
-  virtual UpdateStreamEncoding Encoding() = 0;
+  virtual VideoPacketFormat::Encoding Encoding() = 0;
 };
 
 }  // namespace remoting
diff --git a/remoting/base/decoder_row_based.cc b/remoting/base/decoder_row_based.cc
index 65c74c5..d378e96 100644
--- a/remoting/base/decoder_row_based.cc
+++ b/remoting/base/decoder_row_based.cc
@@ -12,15 +12,17 @@
 namespace remoting {
 
 DecoderRowBased* DecoderRowBased::CreateZlibDecoder() {
-  return new DecoderRowBased(new DecompressorZlib(), EncodingZlib);
+  return new DecoderRowBased(new DecompressorZlib(),
+                             VideoPacketFormat::ENCODING_ZLIB);
 }
 
 DecoderRowBased* DecoderRowBased::CreateVerbatimDecoder() {
-  return new DecoderRowBased(new DecompressorVerbatim(), EncodingNone);
+  return new DecoderRowBased(new DecompressorVerbatim(),
+                             VideoPacketFormat::ENCODING_VERBATIM);
 }
 
 DecoderRowBased::DecoderRowBased(Decompressor* decompressor,
-                                 UpdateStreamEncoding encoding)
+                                 VideoPacketFormat::Encoding encoding)
     : state_(kUninitialized),
       decompressor_(decompressor),
       encoding_(encoding),
@@ -52,7 +54,7 @@
   // Make sure we are not currently initialized.
   CHECK_EQ(kUninitialized, state_);
 
-  if (static_cast<PixelFormat>(frame->format()) != PixelFormatRgb32) {
+  if (static_cast<PixelFormat>(frame->format()) != PIXEL_FORMAT_RGB32) {
     LOG(WARNING) << "DecoderRowBased only supports RGB32.";
     state_ = kError;
     return;
diff --git a/remoting/base/decoder_row_based.h b/remoting/base/decoder_row_based.h
index c226db2..2deb897d 100644
--- a/remoting/base/decoder_row_based.h
+++ b/remoting/base/decoder_row_based.h
@@ -24,7 +24,7 @@
   virtual void Initialize(scoped_refptr<media::VideoFrame> frame,
                           const gfx::Rect& clip, int bytes_per_src_pixel);
   virtual void DecodeBytes(const std::string& encoded_bytes);
-  virtual UpdateStreamEncoding Encoding() { return encoding_; }
+  virtual VideoPacketFormat::Encoding Encoding() { return encoding_; }
 
   // TODO(hclam): Should make this into the Decoder interface.
   // TODO(ajwong): Before putting into the interface, we should decide if the
@@ -32,7 +32,8 @@
   void set_reverse_rows(bool reverse) { reverse_rows_ = reverse; }
 
  private:
-  DecoderRowBased(Decompressor* decompressor, UpdateStreamEncoding encoding);
+  DecoderRowBased(Decompressor* decompressor,
+                  VideoPacketFormat::Encoding encoding);
 
   enum State {
     kUninitialized,
@@ -53,7 +54,7 @@
   scoped_ptr<Decompressor> decompressor_;
 
   // The encoding of the incoming stream.
-  UpdateStreamEncoding encoding_;
+  VideoPacketFormat::Encoding encoding_;
 
   // Number of bytes per pixel from source stream.
   int bytes_per_src_pixel_;
diff --git a/remoting/base/decoder_vp8.cc b/remoting/base/decoder_vp8.cc
index 452cd1f..a8d5245 100644
--- a/remoting/base/decoder_vp8.cc
+++ b/remoting/base/decoder_vp8.cc
@@ -18,12 +18,7 @@
 namespace remoting {
 
 DecoderVp8::DecoderVp8()
-    : state_(kWaitingForBeginRect),
-      rect_x_(0),
-      rect_y_(0),
-      rect_width_(0),
-      rect_height_(0),
-      updated_rects_(NULL),
+    : state_(kUninitialized),
       codec_(NULL) {
 }
 
@@ -35,71 +30,22 @@
   delete codec_;
 }
 
-bool DecoderVp8::BeginDecode(scoped_refptr<media::VideoFrame> frame,
-                             UpdatedRects* updated_rects,
-                             Task* partial_decode_done,
-                             Task* decode_done) {
-  DCHECK(!partial_decode_done_.get());
-  DCHECK(!decode_done_.get());
-  DCHECK(!updated_rects_);
-  DCHECK_EQ(kWaitingForBeginRect, state_);
-
-  partial_decode_done_.reset(partial_decode_done);
-  decode_done_.reset(decode_done);
-  updated_rects_ = updated_rects;
+void DecoderVp8::Initialize(scoped_refptr<media::VideoFrame> frame,
+                            const gfx::Rect& clip, int bytes_per_src_pixel) {
+  DCHECK_EQ(kUninitialized, state_);
 
   if (frame->format() != media::VideoFrame::RGB32) {
     LOG(INFO) << "DecoderVp8 only supports RGB32 as output";
-    return false;
+    state_ = kError;
+    return;
   }
   frame_ = frame;
-  return true;
+
+  state_ = kReady;
 }
 
-bool DecoderVp8::PartialDecode(ChromotingHostMessage* message) {
-  scoped_ptr<ChromotingHostMessage> msg_deleter(message);
-  DCHECK(message->has_update_stream_packet());
-
-  bool ret = true;
-  if (message->update_stream_packet().has_begin_rect())
-    ret = HandleBeginRect(message);
-  if (ret && message->update_stream_packet().has_rect_data())
-    ret = HandleRectData(message);
-  if (ret && message->update_stream_packet().has_end_rect())
-    ret = HandleEndRect(message);
-  return ret;
-}
-
-void DecoderVp8::EndDecode() {
-  DCHECK_EQ(kWaitingForBeginRect, state_);
-  decode_done_->Run();
-
-  partial_decode_done_.reset();
-  decode_done_.reset();
-  frame_ = NULL;
-  updated_rects_ = NULL;
-}
-
-bool DecoderVp8::HandleBeginRect(ChromotingHostMessage* message) {
-  DCHECK_EQ(kWaitingForBeginRect, state_);
-  state_ = kWaitingForRectData;
-
-  rect_width_ = message->update_stream_packet().begin_rect().width();
-  rect_height_ = message->update_stream_packet().begin_rect().height();
-  rect_x_ = message->update_stream_packet().begin_rect().x();
-  rect_y_ = message->update_stream_packet().begin_rect().y();
-
-  PixelFormat pixel_format =
-      message->update_stream_packet().begin_rect().pixel_format();
-  if (pixel_format != PixelFormatYv12)
-    return false;
-  return true;
-}
-
-bool DecoderVp8::HandleRectData(ChromotingHostMessage* message) {
-  DCHECK_EQ(kWaitingForRectData, state_);
-  DCHECK_EQ(0,
-            message->update_stream_packet().rect_data().sequence_number());
+void DecoderVp8::DecodeBytes(const std::string& encoded_bytes) {
+  DCHECK_EQ(kReady, state_);
 
   // Initialize the codec as needed.
   if (!codec_) {
@@ -112,25 +58,21 @@
       LOG(INFO) << "Cannot initialize codec.";
       delete codec_;
       codec_ = NULL;
-      return false;
+      state_ = kError;
+      return;
     }
   }
 
+  LOG(WARNING) << "Decoding " <<  encoded_bytes.size();
+
   // Do the actual decoding.
   vpx_codec_err_t ret = vpx_codec_decode(
-      codec_,
-      (uint8_t*)message->update_stream_packet().rect_data().data().c_str(),
-      message->update_stream_packet().rect_data().data().size(),
-      NULL, 0);
+      codec_, reinterpret_cast<const uint8*>(encoded_bytes.data()),
+      encoded_bytes.size(), NULL, 0);
   if (ret != VPX_CODEC_OK) {
-    LOG(INFO) << "Decoding failed:"
-              << vpx_codec_err_to_string(ret)
-              << "\n"
-              << "Details: "
-              << vpx_codec_error(codec_)
-              << "\n"
+    LOG(INFO) << "Decoding failed:" << vpx_codec_err_to_string(ret) << "\n"
+              << "Details: " << vpx_codec_error(codec_) << "\n"
               << vpx_codec_error_detail(codec_);
-    return false;
   }
 
   // Gets the decoded data.
@@ -138,28 +80,28 @@
   vpx_image_t* image = vpx_codec_get_frame(codec_, &iter);
   if (!image) {
     LOG(INFO) << "No video frame decoded";
-    return false;
+    return;
   }
 
   // Perform YUV conversion.
   media::ConvertYUVToRGB32(image->planes[0], image->planes[1], image->planes[2],
                            frame_->data(media::VideoFrame::kRGBPlane),
-                           rect_width_, rect_height_,
+                           frame_->width(), frame_->height(),
                            image->stride[0], image->stride[1],
                            frame_->stride(media::VideoFrame::kRGBPlane),
                            media::YV12);
-
-  updated_rects_->clear();
-  updated_rects_->push_back(gfx::Rect(rect_x_, rect_y_,
-                                      rect_width_, rect_height_));
-  partial_decode_done_->Run();
-  return true;
 }
 
-bool DecoderVp8::HandleEndRect(ChromotingHostMessage* message) {
-  DCHECK_EQ(kWaitingForRectData, state_);
-  state_ = kWaitingForBeginRect;
-  return true;
+void DecoderVp8::Reset() {
+  frame_ = NULL;
+  state_ = kUninitialized;
+}
+
+bool DecoderVp8::IsReadyForData() {
+  return state_ == kReady;
+}
+
+VideoPacketFormat::Encoding DecoderVp8::Encoding() {
+  return VideoPacketFormat::ENCODING_VP8;
 }
 
 }  // namespace remoting
diff --git a/remoting/base/decoder_vp8.h b/remoting/base/decoder_vp8.h
index 5fe0169..dfef0b7 100644
--- a/remoting/base/decoder_vp8.h
+++ b/remoting/base/decoder_vp8.h
@@ -14,37 +14,34 @@
 class DecoderVp8 : public Decoder {
  public:
   DecoderVp8();
-  ~DecoderVp8();
+  virtual ~DecoderVp8();
 
   // Decoder implementations.
-  virtual bool BeginDecode(scoped_refptr<media::VideoFrame> frame,
-                           UpdatedRects* update_rects,
-                           Task* partial_decode_done,
-                           Task* decode_done);
-  virtual bool PartialDecode(ChromotingHostMessage* message);
-  virtual void EndDecode();
+  virtual void Initialize(scoped_refptr<media::VideoFrame> frame,
+                          const gfx::Rect& clip, int bytes_per_src_pixel);
+
+  virtual void Reset();
+
+  // Feeds more data into the decoder.
+  virtual void DecodeBytes(const std::string& encoded_bytes);
+
+  // Returns true if decoder is ready to accept data via ProcessRectangleData.
+  virtual bool IsReadyForData();
+
+  virtual VideoPacketFormat::Encoding Encoding();
 
  private:
-  bool HandleBeginRect(ChromotingHostMessage* message);
-  bool HandleRectData(ChromotingHostMessage* message);
-  bool HandleEndRect(ChromotingHostMessage* message);
+  enum State {
+    kUninitialized,
+    kReady,
+    kError,
+  };
 
   // The internal state of the decoder.
   State state_;
 
-  // Keeps track of the updating rect.
-  int rect_x_;
-  int rect_y_;
-  int rect_width_;
-  int rect_height_;
-
-  // Tasks to call when decode is done.
-  scoped_ptr<Task> partial_decode_done_;
-  scoped_ptr<Task> decode_done_;
-
   // The video frame to write to.
   scoped_refptr<media::VideoFrame> frame_;
-  UpdatedRects* updated_rects_;
 
   vpx_codec_ctx_t* codec_;
 
diff --git a/remoting/base/encoder.h b/remoting/base/encoder.h
index f9d4043a..05e40ff 100644
--- a/remoting/base/encoder.h
+++ b/remoting/base/encoder.h
@@ -25,23 +25,12 @@
 class Encoder {
  public:
 
-  // EncodingState is a bitfield that tracks the state of the encoding.
-  // An encoding that consists of a single block could concievably be starting
-  // inprogress and ended at the same time.
-  enum {
-    EncodingStarting = 1 << 0,
-    EncodingInProgress = 1 << 1,
-    EncodingEnded = 1 << 2
-  };
-  typedef int EncodingState;
-
   // DataAvailableCallback is called as blocks of data are made available
   // from the encoder. Data made available by the encoder is in the form
   // of HostMessage to reduce the amount of memory copies.
   // The callback takes ownership of the HostMessage and is responsible for
   // deleting it.
-  typedef Callback2<ChromotingHostMessage*,
-                    EncodingState>::Type DataAvailableCallback;
+  typedef Callback1<VideoPacket*>::Type DataAvailableCallback;
 
   virtual ~Encoder() {}
 
diff --git a/remoting/base/encoder_verbatim.cc b/remoting/base/encoder_verbatim.cc
index 8c59a7d9..0bbe4e8 100644
--- a/remoting/base/encoder_verbatim.cc
+++ b/remoting/base/encoder_verbatim.cc
@@ -52,15 +52,14 @@
   const int bytes_per_pixel = GetBytesPerPixel(capture_data_->pixel_format());
   const int row_size = bytes_per_pixel * rect.width();
 
-  ChromotingHostMessage* message = new ChromotingHostMessage();
-  RectangleUpdatePacket* update = message->mutable_rectangle_update();
-  PrepareUpdateStart(rect, update);
+  VideoPacket* packet = new VideoPacket();
+  PrepareUpdateStart(rect, packet);
 
   const uint8* in = capture_data_->data_planes().data[0] +
                     rect.y() * stride +
                     rect.x() * bytes_per_pixel;
   // TODO(hclam): Fill in the sequence number.
-  uint8* out = GetOutputBuffer(update, packet_size_);
+  uint8* out = GetOutputBuffer(packet, packet_size_);
   int total_bytes = 0;
   for (int y = 0; y < rect.height(); y++) {
     memcpy(out, in, row_size);
@@ -70,49 +69,36 @@
   }
 
   // We have reached the end of stream.
-  update->set_flags(update->flags() | RectangleUpdatePacket::LAST_PACKET);
+  packet->set_flags(packet->flags() | VideoPacket::LAST_PACKET);
 
   // If we have filled the message or we have reached the end of stream.
-  message->mutable_rectangle_update()->mutable_encoded_rect()->
-      resize(total_bytes);
-  SubmitMessage(message, rect_index);
+  packet->mutable_data()->resize(total_bytes);
+  SubmitMessage(packet, rect_index);
 }
 
 void EncoderVerbatim::PrepareUpdateStart(const gfx::Rect& rect,
-                                         RectangleUpdatePacket* update) {
+                                         VideoPacket* packet) {
 
-  update->set_flags(update->flags() | RectangleUpdatePacket::FIRST_PACKET);
-  RectangleFormat* format = update->mutable_format();
+  packet->set_flags(packet->flags() | VideoPacket::FIRST_PACKET);
+  VideoPacketFormat* format = packet->mutable_format();
 
   format->set_x(rect.x());
   format->set_y(rect.y());
   format->set_width(rect.width());
   format->set_height(rect.height());
-  format->set_encoding(EncodingNone);
+  format->set_encoding(VideoPacketFormat::ENCODING_VERBATIM);
   format->set_pixel_format(capture_data_->pixel_format());
 }
 
-uint8* EncoderVerbatim::GetOutputBuffer(RectangleUpdatePacket* update,
-                                        size_t size) {
-  update->mutable_encoded_rect()->resize(size);
+uint8* EncoderVerbatim::GetOutputBuffer(VideoPacket* packet, size_t size) {
+  packet->mutable_data()->resize(size);
   // TODO(ajwong): Is there a better way to do this at all???
   return const_cast<uint8*>(reinterpret_cast<const uint8*>(
-      update->mutable_encoded_rect()->data()));
+      packet->mutable_data()->data()));
 }
 
-void EncoderVerbatim::SubmitMessage(ChromotingHostMessage* message,
-                                    size_t rect_index) {
-  EncodingState state = EncodingInProgress;
-  const RectangleUpdatePacket& update = message->rectangle_update();
-  if (rect_index == 0 &&
-      (update.flags() | RectangleUpdatePacket::FIRST_PACKET)) {
-    state |= EncodingStarting;
-  }
-  if (rect_index == capture_data_->dirty_rects().size() - 1 &&
-      (update.flags() | RectangleUpdatePacket::LAST_PACKET)) {
-    state |= EncodingEnded;
-  }
-  callback_->Run(message, state);
+void EncoderVerbatim::SubmitMessage(VideoPacket* packet, size_t rect_index) {
+  callback_->Run(packet);
 }
 
 }  // namespace remoting
diff --git a/remoting/base/encoder_verbatim.h b/remoting/base/encoder_verbatim.h
index 10fa7ac..81d109d 100644
--- a/remoting/base/encoder_verbatim.h
+++ b/remoting/base/encoder_verbatim.h
@@ -31,15 +31,14 @@
   void EncodeRect(const gfx::Rect& rect, size_t rect_index);
 
   // Marks a packets as the first in a series of rectangle updates.
-  void PrepareUpdateStart(const gfx::Rect& rect,
-                          RectangleUpdatePacket* update);
+  void PrepareUpdateStart(const gfx::Rect& rect, VideoPacket* packet);
 
   // Retrieves a pointer to the output buffer in |update| used for storing the
   // encoded rectangle data.  Will resize the buffer to |size|.
-  uint8* GetOutputBuffer(RectangleUpdatePacket* update, size_t size);
+  uint8* GetOutputBuffer(VideoPacket* packet, size_t size);
 
   // Submit |message| to |callback_|.
-  void SubmitMessage(ChromotingHostMessage* message, size_t rect_index);
+  void SubmitMessage(VideoPacket* packet, size_t rect_index);
 
   scoped_refptr<CaptureData> capture_data_;
   scoped_ptr<DataAvailableCallback> callback_;
diff --git a/remoting/base/encoder_vp8.cc b/remoting/base/encoder_vp8.cc
index eec6ed5d..7a58f7b 100644
--- a/remoting/base/encoder_vp8.cc
+++ b/remoting/base/encoder_vp8.cc
@@ -48,6 +48,7 @@
   vpx_codec_enc_cfg_t config;
   const vpx_codec_iface_t* algo =
       (const vpx_codec_iface_t*)media::GetVp8CxAlgoAddress();
+  CHECK(algo);
   vpx_codec_err_t ret = vpx_codec_enc_config_default(algo, &config, 0);
   if (ret != VPX_CODEC_OK)
     return false;
@@ -70,9 +71,19 @@
   return true;
 }
 
+static int clip_byte(int x) {
+  if (x > 255)
+    return 255;
+  else if (x < 0)
+    return 0;
+  else
+    return x;
+}
+
 bool EncoderVp8::PrepareImage(scoped_refptr<CaptureData> capture_data) {
+  const int plane_size = capture_data->width() * capture_data->height();
+
   if (!yuv_image_.get()) {
-    const int plane_size = capture_data->width() * capture_data->height();
 
     // YUV image size is 1.5 times of a plane. Multiplication is performed first
     // to avoid rounding error.
@@ -100,22 +111,36 @@
 
   // And then do RGB->YUV conversion.
-  // Currently we just produce the Y channel as the average of RGB. This will
-  // give a gray scale image after conversion.
-  // TODO(hclam): Implement the actual color space conversion.
-  DCHECK(capture_data->pixel_format() == PixelFormatRgb32)
+  // This is a simple integer approximation of the RGB->YUV color space
+  // conversion; chroma is sampled once per 2x2 block of pixels.
+  // TODO(sergeyu): Move this code to a separate routine.
+  // TODO(sergeyu): Optimize this code.
+  DCHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32)
       << "Only RGB32 is supported";
   uint8* in = capture_data->data_planes().data[0];
   const int in_stride = capture_data->data_planes().strides[0];
-  uint8* out = yuv_image_.get();
+  uint8* y_out = yuv_image_.get();
+  uint8* u_out = yuv_image_.get() + plane_size;
+  uint8* v_out = yuv_image_.get() + plane_size + plane_size / 4;
   const int out_stride = image_->stride[0];
   for (int i = 0; i < capture_data->height(); ++i) {
     for (int j = 0; j < capture_data->width(); ++j) {
       // Since the input pixel format is RGB32, there are 4 bytes per pixel.
       uint8* pixel = in + 4 * j;
-      out[j] = (pixel[0] + pixel[1] + pixel[2]) / 3;
+      y_out[j] = clip_byte(((pixel[0] * 66 + pixel[1] * 129 +
+                             pixel[2] * 25 + 128) >> 8) + 16);
+      if (i % 2 == 0 && j % 2 == 0) {
+        u_out[j / 2] = clip_byte(((pixel[0] * -38 + pixel[1] * -74 +
+                                   pixel[2] * 112 + 128) >> 8) + 128);
+        v_out[j / 2] = clip_byte(((pixel[0] * 112 + pixel[1] * -94 +
+                                   pixel[2] * -18 + 128) >> 8) + 128);
+      }
     }
     in += in_stride;
-    out += out_stride;
+    y_out += out_stride;
+    if (i % 2 == 0) {
+      u_out += out_stride / 2;
+      v_out += out_stride / 2;
+    }
   }
   return true;
 }
@@ -138,13 +163,10 @@
   vpx_codec_err_t ret = vpx_codec_encode(codec_.get(), image_.get(),
                                          last_timestamp_,
                                          1, 0, VPX_DL_REALTIME);
-  DCHECK(ret == VPX_CODEC_OK) << "Encoding error: "
-                              << vpx_codec_err_to_string(ret)
-                              << "\n"
-                              << "Details: "
-                              << vpx_codec_error(codec_.get())
-                              << "\n"
-                              << vpx_codec_error_detail(codec_.get());
+  DCHECK_EQ(ret, VPX_CODEC_OK)
+      << "Encoding error: " << vpx_codec_err_to_string(ret) << "\n"
+      << "Details: " << vpx_codec_error(codec_.get()) << "\n"
+      << vpx_codec_error_detail(codec_.get());
 
   // TODO(hclam): fix this.
   last_timestamp_ += 100;
@@ -155,16 +177,7 @@
 
   // TODO(hclam): Make sure we get exactly one frame from the packet.
   // TODO(hclam): We should provide the output buffer to avoid one copy.
-  ChromotingHostMessage* message = new ChromotingHostMessage();
-  UpdateStreamPacketMessage* packet = message->mutable_update_stream_packet();
-
-  // Prepare the begin rect.
-  packet->mutable_begin_rect()->set_x(0);
-  packet->mutable_begin_rect()->set_y(0);
-  packet->mutable_begin_rect()->set_width(capture_data->width());
-  packet->mutable_begin_rect()->set_height(capture_data->height());
-  packet->mutable_begin_rect()->set_encoding(EncodingVp8);
-  packet->mutable_begin_rect()->set_pixel_format(PixelFormatYv12);
+  VideoPacket* message = new VideoPacket();
 
   while (!got_data) {
     const vpx_codec_cx_pkt_t* packet = vpx_codec_get_cx_data(codec_.get(),
@@ -175,7 +188,7 @@
     switch (packet->kind) {
       case VPX_CODEC_CX_FRAME_PKT:
         got_data = true;
-        message->mutable_update_stream_packet()->mutable_rect_data()->set_data(
+        message->set_data(
             packet->data.frame.buf, packet->data.frame.sz);
         break;
       default:
@@ -183,11 +196,15 @@
     }
   }
 
-  // Enter the end rect.
-  message->mutable_update_stream_packet()->mutable_end_rect();
-  data_available_callback->Run(
-      message,
-      EncodingStarting | EncodingInProgress | EncodingEnded);
+  message->mutable_format()->set_encoding(VideoPacketFormat::ENCODING_VP8);
+  message->set_flags(VideoPacket::FIRST_PACKET | VideoPacket::LAST_PACKET);
+  message->mutable_format()->set_pixel_format(PIXEL_FORMAT_RGB32);
+  message->mutable_format()->set_x(0);
+  message->mutable_format()->set_y(0);
+  message->mutable_format()->set_width(capture_data->width());
+  message->mutable_format()->set_height(capture_data->height());
+
+  data_available_callback->Run(message);
   delete data_available_callback;
 }
 
diff --git a/remoting/base/encoder_zlib.cc b/remoting/base/encoder_zlib.cc
index ddbe923..e184fd39 100644
--- a/remoting/base/encoder_zlib.cc
+++ b/remoting/base/encoder_zlib.cc
@@ -26,7 +26,7 @@
 void EncoderZlib::Encode(scoped_refptr<CaptureData> capture_data,
                          bool key_frame,
                          DataAvailableCallback* data_available_callback) {
-  CHECK(capture_data->pixel_format() == PixelFormatRgb32)
+  CHECK(capture_data->pixel_format() == PIXEL_FORMAT_RGB32)
       << "Zlib Encoder only works with RGB32. Got "
       << capture_data->pixel_format();
   capture_data_ = capture_data;
@@ -51,24 +51,22 @@
   const int bytes_per_pixel = GetBytesPerPixel(capture_data_->pixel_format());
   const int row_size = bytes_per_pixel * rect.width();
 
-  ChromotingHostMessage* message = new ChromotingHostMessage();
-  RectangleUpdatePacket* update = message->mutable_rectangle_update();
-  PrepareUpdateStart(rect, update);
+  VideoPacket* packet = new VideoPacket();
+  PrepareUpdateStart(rect, packet);
   const uint8* in = capture_data_->data_planes().data[0] +
                     rect.y() * strides +
                     rect.x() * bytes_per_pixel;
   // TODO(hclam): Fill in the sequence number.
-  uint8* out = GetOutputBuffer(update, packet_size_);
+  uint8* out = GetOutputBuffer(packet, packet_size_);
   int filled = 0;
   int row_x = 0;
   int row_y = 0;
   bool compress_again = true;
   while (compress_again) {
     // Prepare a message for sending out.
-    if (!message) {
-      message = new ChromotingHostMessage();
-      update = message->mutable_rectangle_update();
-      out = GetOutputBuffer(update, packet_size_);
+    if (!packet) {
+      packet = new VideoPacket();
+      out = GetOutputBuffer(packet, packet_size_);
       filled = 0;
     }
 
@@ -91,15 +89,14 @@
 
     // We have reached the end of stream.
     if (!compress_again) {
-      update->set_flags(update->flags() | RectangleUpdatePacket::LAST_PACKET);
+      packet->set_flags(packet->flags() | VideoPacket::LAST_PACKET);
     }
 
     // If we have filled the message or we have reached the end of stream.
     if (filled == packet_size_ || !compress_again) {
-      message->mutable_rectangle_update()->mutable_encoded_rect()->
-          resize(filled);
-      SubmitMessage(message, rect_index);
-      message = NULL;
+      packet->mutable_data()->resize(filled);
+      SubmitMessage(packet, rect_index);
+      packet = NULL;
     }
 
     // Reached the end of input row and we're not at the last row.
@@ -112,40 +109,27 @@
 }
 
 void EncoderZlib::PrepareUpdateStart(const gfx::Rect& rect,
-                                     RectangleUpdatePacket* update) {
-
-  update->set_flags(update->flags() | RectangleUpdatePacket::FIRST_PACKET);
-  RectangleFormat* format = update->mutable_format();
+                                     VideoPacket* packet) {
+  packet->set_flags(packet->flags() | VideoPacket::FIRST_PACKET);
+  VideoPacketFormat* format = packet->mutable_format();
 
   format->set_x(rect.x());
   format->set_y(rect.y());
   format->set_width(rect.width());
   format->set_height(rect.height());
-  format->set_encoding(EncodingZlib);
+  format->set_encoding(VideoPacketFormat::ENCODING_ZLIB);
   format->set_pixel_format(capture_data_->pixel_format());
 }
 
-uint8* EncoderZlib::GetOutputBuffer(RectangleUpdatePacket* update,
-                                    size_t size) {
-  update->mutable_encoded_rect()->resize(size);
+uint8* EncoderZlib::GetOutputBuffer(VideoPacket* packet, size_t size) {
+  packet->mutable_data()->resize(size);
   // TODO(ajwong): Is there a better way to do this at all???
   return const_cast<uint8*>(reinterpret_cast<const uint8*>(
-      update->mutable_encoded_rect()->data()));
+      packet->mutable_data()->data()));
 }
 
-void EncoderZlib::SubmitMessage(ChromotingHostMessage* message,
-                                size_t rect_index) {
-  EncodingState state = EncodingInProgress;
-  const RectangleUpdatePacket& update = message->rectangle_update();
-  if (rect_index == 0 &&
-      (update.flags() | RectangleUpdatePacket::FIRST_PACKET)) {
-    state |= EncodingStarting;
-  }
-  if (rect_index == capture_data_->dirty_rects().size() - 1 &&
-      (update.flags() | RectangleUpdatePacket::LAST_PACKET)) {
-    state |= EncodingEnded;
-  }
-  callback_->Run(message, state);
+void EncoderZlib::SubmitMessage(VideoPacket* packet, size_t rect_index) {
+  callback_->Run(packet);
 }
 
 }  // namespace remoting
diff --git a/remoting/base/encoder_zlib.h b/remoting/base/encoder_zlib.h
index 3d0a13e4..6699f03 100644
--- a/remoting/base/encoder_zlib.h
+++ b/remoting/base/encoder_zlib.h
@@ -32,15 +32,14 @@
                   size_t rect_index);
 
   // Marks a packets as the first in a series of rectangle updates.
-  void PrepareUpdateStart(const gfx::Rect& rect,
-                          RectangleUpdatePacket* update);
+  void PrepareUpdateStart(const gfx::Rect& rect, VideoPacket* packet);
 
   // Retrieves a pointer to the output buffer in |update| used for storing the
   // encoded rectangle data.  Will resize the buffer to |size|.
-  uint8* GetOutputBuffer(RectangleUpdatePacket* update, size_t size);
+  uint8* GetOutputBuffer(VideoPacket* packet, size_t size);
 
   // Submit |message| to |callback_|.
-  void SubmitMessage(ChromotingHostMessage* message, size_t rect_index);
+  void SubmitMessage(VideoPacket* packet, size_t rect_index);
 
   scoped_refptr<CaptureData> capture_data_;
   scoped_ptr<DataAvailableCallback> callback_;
diff --git a/remoting/base/multiple_array_input_stream_unittest.cc b/remoting/base/multiple_array_input_stream_unittest.cc
index 4a840e4..1a21add 100644
--- a/remoting/base/multiple_array_input_stream_unittest.cc
+++ b/remoting/base/multiple_array_input_stream_unittest.cc
@@ -46,7 +46,7 @@
   scoped_array<char> buffer(new char[str.size() + 1]);
   buffer[str.size()] = '\0';
   EXPECT_EQ(ReadFromInput(input, buffer.get(), str.size()), str.size());
-  EXPECT_STREQ(str.c_str(), buffer.get());
+  EXPECT_STREQ(str.data(), buffer.get());
 }
 
 // Construct and prepare data in the |output_stream|.
@@ -69,7 +69,7 @@
   }
 
   MultipleArrayInputStream* mstream = new MultipleArrayInputStream();
-  const char* data = kTestData.c_str();
+  const char* data = kTestData.data();
   for (int i = 0; i < segments; ++i) {
     int size = i % 2 == 0 ? 1 : 2;
     mstream->AddBuffer(new net::StringIOBuffer(std::string(data, size)), size);
diff --git a/remoting/base/util.cc b/remoting/base/util.cc
index 8ccbd23..f24d3228 100644
--- a/remoting/base/util.cc
+++ b/remoting/base/util.cc
@@ -10,13 +10,13 @@
 
 int GetBytesPerPixel(PixelFormat format) {
   // Note: The order is important here for performance. This is sorted from the
-  // most common to the less common (PixelFormatAscii is mostly used
+  // most common to the less common (PIXEL_FORMAT_ASCII is mostly used
   // just for testing).
   switch (format) {
-    case PixelFormatRgb24:  return 3;
-    case PixelFormatRgb565: return 2;
-    case PixelFormatRgb32:  return 4;
-    case PixelFormatAscii:  return 1;
+    case PIXEL_FORMAT_RGB24:  return 3;
+    case PIXEL_FORMAT_RGB565: return 2;
+    case PIXEL_FORMAT_RGB32:  return 4;
+    case PIXEL_FORMAT_ASCII:  return 1;
     default:
       NOTREACHED() << "Pixel format not supported";
       return 0;
diff --git a/remoting/base/util.h b/remoting/base/util.h
index e3db289..d7f4128 100644
--- a/remoting/base/util.h
+++ b/remoting/base/util.h
@@ -5,7 +5,7 @@
 #ifndef REMOTING_BASE_UTIL_H_
 #define REMOTING_BASE_UTIL_H_
 
-#include "remoting/proto/event.pb.h"
+#include "remoting/proto/video.pb.h"
 
 namespace remoting {
 
diff --git a/remoting/client/chromoting_client.cc b/remoting/client/chromoting_client.cc
index 95271be..8b46a9d6 100644
--- a/remoting/client/chromoting_client.cc
+++ b/remoting/client/chromoting_client.cc
@@ -131,10 +131,10 @@
     // TODO(ajwong): Change this to use a done callback.
     InitClient(msg->init_client(),
                NewTracedMethod(this, &ChromotingClient::OnMessageDone, msg));
-  } else if (msg->has_rectangle_update()) {
+  } else if (msg->has_video_packet()) {
     ScopedTracer tracer("Handle Rectangle Update");
     rectangle_decoder_->DecodePacket(
-        msg->rectangle_update(),
+        msg->video_packet(),
         NewTracedMethod(this, &ChromotingClient::OnMessageDone, msg));
   } else {
     NOTREACHED() << "Unknown message received";
diff --git a/remoting/client/chromoting_view_unittest.cc b/remoting/client/chromoting_view_unittest.cc
index 4b31435..a57962ef 100644
--- a/remoting/client/chromoting_view_unittest.cc
+++ b/remoting/client/chromoting_view_unittest.cc
@@ -26,7 +26,7 @@
   MOCK_METHOD1(PartialDecode, bool(ChromotingHostMessage* message));
   MOCK_METHOD0(EndDecode, void());
 
-  MOCK_METHOD0(Encoding, UpdateStreamEncoding());
+  MOCK_METHOD0(Encoding, VideoPacketFormat::Encoding());
   MOCK_METHOD0(IsStarted, bool());
 
  private:
@@ -64,7 +64,7 @@
   }
 
   // Testing wrappers for private setup/startup decoder routines.
-  bool setup_decoder(UpdateStreamEncoding encoding) {
+  bool setup_decoder(VideoPacketFormat::Encoding encoding) {
     return SetupDecoder(encoding);
   }
   bool begin_decoding(Task* partial_decode_done, Task* decode_done) {
diff --git a/remoting/client/rectangle_update_decoder.cc b/remoting/client/rectangle_update_decoder.cc
index 8471991..9a77860 100644
--- a/remoting/client/rectangle_update_decoder.cc
+++ b/remoting/client/rectangle_update_decoder.cc
@@ -9,6 +9,7 @@
 #include "media/base/callback.h"
 #include "remoting/base/decoder.h"
 #include "remoting/base/decoder_row_based.h"
+#include "remoting/base/decoder_vp8.h"
 #include "remoting/base/tracer.h"
 #include "remoting/base/util.h"
 #include "remoting/client/frame_consumer.h"
@@ -46,7 +47,7 @@
 RectangleUpdateDecoder::~RectangleUpdateDecoder() {
 }
 
-void RectangleUpdateDecoder::DecodePacket(const RectangleUpdatePacket& packet,
+void RectangleUpdateDecoder::DecodePacket(const VideoPacket& packet,
                                           Task* done) {
   if (message_loop_ != MessageLoop::current()) {
     message_loop_->PostTask(
@@ -70,8 +71,8 @@
                       &RectangleUpdateDecoder::ProcessPacketData,
                       packet, done_runner.release());
 
-  if (packet.flags() | RectangleUpdatePacket::FIRST_PACKET) {
-    const RectangleFormat& format = packet.format();
+  if (packet.flags() & VideoPacket::FIRST_PACKET) {
+    const VideoPacketFormat& format = packet.format();
 
     InitializeDecoder(format, process_packet_data);
   } else {
@@ -81,8 +82,7 @@
 }
 
 void RectangleUpdateDecoder::ProcessPacketData(
-    const RectangleUpdatePacket& packet,
-    Task* done) {
+    const VideoPacket& packet, Task* done) {
   AutoTaskRunner done_runner(done);
 
   if (!decoder_->IsReadyForData()) {
@@ -92,9 +92,9 @@
   }
 
   TraceContext::tracer()->PrintString("Executing Decode.");
-  decoder_->DecodeBytes(packet.encoded_rect());
+  decoder_->DecodeBytes(packet.data());
 
-  if (packet.flags() | RectangleUpdatePacket::LAST_PACKET) {
+  if (packet.flags() & VideoPacket::LAST_PACKET) {
     decoder_->Reset();
 
     UpdatedRects* rects = new UpdatedRects();
@@ -109,39 +109,38 @@
 }
 
 // static
-bool RectangleUpdateDecoder::IsValidPacket(
-    const RectangleUpdatePacket& packet) {
+bool RectangleUpdateDecoder::IsValidPacket(const VideoPacket& packet) {
   if (!packet.IsInitialized()) {
     LOG(WARNING) << "Protobuf consistency checks fail.";
     return false;
   }
 
   // First packet must have a format.
-  if (packet.flags() | RectangleUpdatePacket::FIRST_PACKET) {
+  if (packet.flags() & VideoPacket::FIRST_PACKET) {
     if (!packet.has_format()) {
       LOG(WARNING) << "First packet must have format.";
       return false;
     }
 
     // TODO(ajwong): Verify that we don't need to whitelist encodings.
-    const RectangleFormat& format = packet.format();
+    const VideoPacketFormat& format = packet.format();
     if (!format.has_encoding() ||
-        format.encoding() == EncodingInvalid) {
+        format.encoding() == VideoPacketFormat::ENCODING_INVALID) {
       LOG(WARNING) << "Invalid encoding specified.";
       return false;
     }
   }
 
   // We shouldn't generate null packets.
-  if (!packet.has_encoded_rect()) {
-    LOG(WARNING) << "Packet w/o an encoded rectangle received.";
+  if (!packet.has_data()) {
+    LOG(WARNING) << "Packet w/o data received.";
     return false;
   }
 
   return true;
 }
 
-void RectangleUpdateDecoder::InitializeDecoder(const RectangleFormat& format,
+void RectangleUpdateDecoder::InitializeDecoder(const VideoPacketFormat& format,
                                                Task* done) {
   if (message_loop_ != MessageLoop::current()) {
     message_loop_->PostTask(
@@ -192,12 +191,15 @@
     CHECK(decoder_->Encoding() == format.encoding());
   } else {
     // Initialize a new decoder based on this message encoding.
-    if (format.encoding() == EncodingNone) {
+    if (format.encoding() == VideoPacketFormat::ENCODING_VERBATIM) {
       TraceContext::tracer()->PrintString("Creating Verbatim decoder.");
       decoder_.reset(DecoderRowBased::CreateVerbatimDecoder());
-    } else if (format.encoding() == EncodingZlib) {
+    } else if (format.encoding() == VideoPacketFormat::ENCODING_ZLIB) {
       TraceContext::tracer()->PrintString("Creating Zlib decoder");
       decoder_.reset(DecoderRowBased::CreateZlibDecoder());
+    } else if (format.encoding() == VideoPacketFormat::ENCODING_VP8) {
+      TraceContext::tracer()->PrintString("Creating VP8 decoder");
+      decoder_.reset(new DecoderVp8());
     } else {
       NOTREACHED() << "Invalid Encoding found: " << format.encoding();
     }
diff --git a/remoting/client/rectangle_update_decoder.h b/remoting/client/rectangle_update_decoder.h
index b383c20a..e6c344e4 100644
--- a/remoting/client/rectangle_update_decoder.h
+++ b/remoting/client/rectangle_update_decoder.h
@@ -16,8 +16,8 @@
 
 class Decoder;
 class FrameConsumer;
-class RectangleFormat;
-class RectangleUpdatePacket;
+class VideoPacketFormat;
+class VideoPacket;
 
 // TODO(ajwong): Re-examine this API, especially with regards to how error
 // conditions on each step are reported.  Should they be CHECKs? Logs? Other?
@@ -34,14 +34,14 @@
   //
   // TODO(ajwong): Should packet be a const pointer to make the lifetime
   // more clear?
-  void DecodePacket(const RectangleUpdatePacket& packet, Task* done);
+  void DecodePacket(const VideoPacket& packet, Task* done);
 
  private:
-  static bool IsValidPacket(const RectangleUpdatePacket& packet);
+  static bool IsValidPacket(const VideoPacket& packet);
 
-  void InitializeDecoder(const RectangleFormat& format, Task* done);
+  void InitializeDecoder(const VideoPacketFormat& format, Task* done);
 
-  void ProcessPacketData(const RectangleUpdatePacket& packet, Task* done);
+  void ProcessPacketData(const VideoPacket& packet, Task* done);
 
   // Pointers to infrastructure objects.  Not owned.
   MessageLoop* message_loop_;
diff --git a/remoting/host/capturer.cc b/remoting/host/capturer.cc
index 61148e6..9e63041f 100644
--- a/remoting/host/capturer.cc
+++ b/remoting/host/capturer.cc
@@ -13,7 +13,7 @@
 Capturer::Capturer()
     : width_(0),
       height_(0),
-      pixel_format_(PixelFormatInvalid),
+      pixel_format_(PIXEL_FORMAT_INVALID),
       bytes_per_row_(0),
       current_buffer_(0) {
 }
diff --git a/remoting/host/capturer_fake.cc b/remoting/host/capturer_fake.cc
index 56b43cf..072f19a 100644
--- a/remoting/host/capturer_fake.cc
+++ b/remoting/host/capturer_fake.cc
@@ -8,13 +8,27 @@
 
 namespace remoting {
 
-static const int kWidth = 320;
-static const int kHeight = 240;
+// CapturerFake generates a white picture of size kWidth x kHeight with a
+// rectangle of size kBoxWidth x kBoxHeight. The rectangle moves kSpeed pixels
+// per frame along both axes, and bounces off the sides of the screen.
+static const int kWidth = 800;
+static const int kHeight = 600;
+static const int kBoxWidth = 140;
+static const int kBoxHeight = 140;
+static const int kSpeed = 20;
+
+COMPILE_ASSERT(kBoxWidth < kWidth && kBoxHeight < kHeight, bad_box_size);
+COMPILE_ASSERT((kBoxWidth % kSpeed == 0) && (kWidth % kSpeed == 0) &&
+               (kBoxHeight % kSpeed == 0) && (kHeight % kSpeed == 0),
+               sizes_must_be_multiple_of_kSpeed);
+
 static const int kBytesPerPixel = 4;  // 32 bit RGB is 4 bytes per pixel.
-static const int kMaxColorChannelValue = 255;
 
 CapturerFake::CapturerFake()
-    : seed_(0) {
+    : box_pos_x_(0),
+      box_pos_y_(0),
+      box_speed_x_(kSpeed),
+      box_speed_y_(kSpeed) {
   ScreenConfigurationChanged();
 }
 
@@ -24,7 +38,7 @@
 void CapturerFake::ScreenConfigurationChanged() {
   width_ = kWidth;
   height_ = kHeight;
-  pixel_format_ = PixelFormatRgb32;
+  pixel_format_ = PIXEL_FORMAT_RGB32;
   bytes_per_row_ = width_ * kBytesPerPixel;
 
   // Create memory for the buffers.
@@ -54,16 +68,36 @@
 }
 
 void CapturerFake::GenerateImage() {
-  uint8* row = buffers_[current_buffer_].get();
-  for (int y = 0; y < height_; ++y) {
-    int offset = y % 3;
-    for (int x = 0; x < width_; ++x) {
-      row[x * kBytesPerPixel + offset] = seed_++;
-      seed_ &= kMaxColorChannelValue;
+  memset(buffers_[current_buffer_].get(), 0xff,
+         width_ * height_ * kBytesPerPixel);
+
+  uint8* row = buffers_[current_buffer_].get() +
+      (box_pos_y_ * width_ + box_pos_x_) * kBytesPerPixel;
+
+  box_pos_x_ += box_speed_x_;
+  if (box_pos_x_ + kBoxWidth >= width_ || box_pos_x_ == 0)
+    box_speed_x_ = -box_speed_x_;
+
+  box_pos_y_ += box_speed_y_;
+  if (box_pos_y_ + kBoxHeight >= height_ || box_pos_y_ == 0)
+    box_speed_y_ = -box_speed_y_;
+
+  // Draw a rectangle with the following colors in its corners:
+  //     cyan....yellow
+  //     ..............
+  //     blue.......red
+  for (int y = 0; y < kBoxHeight; ++y) {
+    for (int x = 0; x < kBoxWidth; ++x) {
+      int r = x * 255 / kBoxWidth;
+      int g = y * 255 / kBoxHeight;
+      int b = 255 - (x * 255 / kBoxWidth);
+      row[x * kBytesPerPixel] = r;
+      row[x * kBytesPerPixel+1] = g;
+      row[x * kBytesPerPixel+2] = b;
+      row[x * kBytesPerPixel+3] = 0xff;
     }
     row += bytes_per_row_;
   }
-  ++seed_;
 }
 
 }  // namespace remoting
diff --git a/remoting/host/capturer_fake.h b/remoting/host/capturer_fake.h
index 84cc7ba..46e02661 100644
--- a/remoting/host/capturer_fake.h
+++ b/remoting/host/capturer_fake.h
@@ -10,8 +10,7 @@
 
 namespace remoting {
 
-// A CapturerFake always output an image of 640x480 in 24bit RGB. The image
-// is artificially generated for testing purpose.
+// A CapturerFake generates an artificial image for testing purposes.
 //
 // CapturerFake is doubled buffered as required by Capturer. See
 // remoting/host/capturer.h.
@@ -30,8 +29,10 @@
   // Generates an image in the front buffer.
   void GenerateImage();
 
-  // The seed for generating the image.
-  int seed_;
+  int box_pos_x_;
+  int box_pos_y_;
+  int box_speed_x_;
+  int box_speed_y_;
 
   // We have two buffers for the screen images as required by Capturer.
   scoped_array<uint8> buffers_[kNumBuffers];
diff --git a/remoting/host/capturer_fake_ascii.cc b/remoting/host/capturer_fake_ascii.cc
index 1bb9d441..4b259a99 100644
--- a/remoting/host/capturer_fake_ascii.cc
+++ b/remoting/host/capturer_fake_ascii.cc
@@ -21,7 +21,7 @@
 void CapturerFakeAscii::ScreenConfigurationChanged() {
   width_ = kWidth;
   height_ = kHeight;
-  pixel_format_ = PixelFormatAscii;
+  pixel_format_ = PIXEL_FORMAT_ASCII;
   bytes_per_row_ = width_ * kBytesPerPixel;
 
   // Create memory for the buffers.
diff --git a/remoting/host/capturer_gdi.cc b/remoting/host/capturer_gdi.cc
index 209eea4..7742ff90 100644
--- a/remoting/host/capturer_gdi.cc
+++ b/remoting/host/capturer_gdi.cc
@@ -57,7 +57,7 @@
   int rounded_width = (width_ + 3) & (~3);
 
   // Dimensions of screen.
-  pixel_format_ = PixelFormatRgb32;
+  pixel_format_ = PIXEL_FORMAT_RGB32;
   bytes_per_row_ = rounded_width * kBytesPerPixel;
 
   // Create a differ for this screen size.
diff --git a/remoting/host/capturer_linux.cc b/remoting/host/capturer_linux.cc
index fcf7a01..99013d0 100644
--- a/remoting/host/capturer_linux.cc
+++ b/remoting/host/capturer_linux.cc
@@ -235,7 +235,7 @@
 
   scoped_refptr<CaptureData> capture_data(
       new CaptureData(planes, capturer_->width(), capturer_->height(),
-                      PixelFormatRgb32));
+                      PIXEL_FORMAT_RGB32));
 
   for (InvalidRects::const_iterator it = rects.begin();
        it != rects.end();
diff --git a/remoting/host/capturer_mac.cc b/remoting/host/capturer_mac.cc
index 6d6b6cc..3eafa34 100644
--- a/remoting/host/capturer_mac.cc
+++ b/remoting/host/capturer_mac.cc
@@ -47,7 +47,7 @@
   width_ = CGDisplayPixelsWide(mainDevice);
   height_ = CGDisplayPixelsHigh(mainDevice);
   bytes_per_row_ = width_ * sizeof(uint32_t);
-  pixel_format_ = PixelFormatRgb32;
+  pixel_format_ = PIXEL_FORMAT_RGB32;
   size_t buffer_size = height() * bytes_per_row_;
   for (int i = 0; i < kNumBuffers; ++i) {
     buffers_[i].reset(new uint8[buffer_size]);
diff --git a/remoting/host/client_connection.cc b/remoting/host/client_connection.cc
index 8063d04..e325bf7 100644
--- a/remoting/host/client_connection.cc
+++ b/remoting/host/client_connection.cc
@@ -55,15 +55,20 @@
   video_writer_.SendMessage(msg);
 }
 
-void ClientConnection::SendUpdateStreamPacketMessage(
-    const ChromotingHostMessage& message) {
+void ClientConnection::SendVideoPacket(const VideoPacket& packet) {
   DCHECK_EQ(loop_, MessageLoop::current());
 
   // If we are disconnected then return.
   if (!connection_)
     return;
 
-  video_writer_.SendMessage(message);
+  ChromotingHostMessage* message = new ChromotingHostMessage();
+  // TODO(sergeyu): avoid memcopy here.
+  *message->mutable_video_packet() = packet;
+
+  video_writer_.SendMessage(*message);
+
+  delete message;
 }
 
 int ClientConnection::GetPendingUpdateStreamMessages() {
diff --git a/remoting/host/client_connection.h b/remoting/host/client_connection.h
index 69283e2..38e383cb 100644
--- a/remoting/host/client_connection.h
+++ b/remoting/host/client_connection.h
@@ -64,8 +64,7 @@
   virtual void SendInitClientMessage(int width, int height);
 
   // Send encoded update stream data to the viewer.
-  virtual void SendUpdateStreamPacketMessage(
-      const ChromotingHostMessage& message);
+  virtual void SendVideoPacket(const VideoPacket& packet);
 
   // Gets the number of update stream messages not yet transmitted.
   // Note that the value returned is an estimate using average size of the
diff --git a/remoting/host/client_connection_unittest.cc b/remoting/host/client_connection_unittest.cc
index 240d8f47..b5802cf6 100644
--- a/remoting/host/client_connection_unittest.cc
+++ b/remoting/host/client_connection_unittest.cc
@@ -46,8 +46,8 @@
 
 TEST_F(ClientConnectionTest, SendUpdateStream) {
   // Then send the actual data.
-  ChromotingHostMessage message;
-  viewer_->SendUpdateStreamPacketMessage(message);
+  VideoPacket packet;
+  viewer_->SendVideoPacket(packet);
 
   // And then close the connection to ClientConnection.
   viewer_->Disconnect();
@@ -76,8 +76,8 @@
   message_loop_.RunAllPending();
   EXPECT_TRUE(connection_->is_closed());
 
-  ChromotingHostMessage message;
-  viewer_->SendUpdateStreamPacketMessage(message);
+  VideoPacket packet;
+  viewer_->SendVideoPacket(packet);
   viewer_->Disconnect();
   message_loop_.RunAllPending();
 
diff --git a/remoting/host/mock_objects.h b/remoting/host/mock_objects.h
index 50c94ef..428bd749 100644
--- a/remoting/host/mock_objects.h
+++ b/remoting/host/mock_objects.h
@@ -47,10 +47,7 @@
 
   MOCK_METHOD1(Init, void(ChromotingConnection* connection));
   MOCK_METHOD2(SendInitClientMessage, void(int width, int height));
-  MOCK_METHOD0(SendBeginUpdateStreamMessage, void());
-  MOCK_METHOD1(SendUpdateStreamPacketMessage,
-               void(const ChromotingHostMessage& message));
-  MOCK_METHOD0(SendEndUpdateStreamMessage, void());
+  MOCK_METHOD1(SendVideoPacket, void(const VideoPacket& packet));
   MOCK_METHOD0(GetPendingUpdateStreamMessages, int());
   MOCK_METHOD0(Disconnect, void());
 
diff --git a/remoting/host/session_manager.cc b/remoting/host/session_manager.cc
index a61a6bdd..1867f07 100644
--- a/remoting/host/session_manager.cc
+++ b/remoting/host/session_manager.cc
@@ -328,18 +328,16 @@
   ScheduleNextRateControl();
 }
 
-void SessionManager::DoSendUpdate(ChromotingHostMessage* message,
-                                  Encoder::EncodingState state) {
+void SessionManager::DoSendVideoPacket(VideoPacket* packet) {
   DCHECK_EQ(network_loop_, MessageLoop::current());
 
   TraceContext::tracer()->PrintString("DoSendUpdate");
 
   for (ClientConnectionList::const_iterator i = clients_.begin();
        i < clients_.end(); ++i) {
-    (*i)->SendUpdateStreamPacketMessage(*message);
+    (*i)->SendVideoPacket(*packet);
   }
-
-  delete message;
+  delete packet;
 
   TraceContext::tracer()->PrintString("DoSendUpdate done");
 }
@@ -399,19 +397,20 @@
   TraceContext::tracer()->PrintString("Encode Done");
 }
 
-void SessionManager::EncodeDataAvailableTask(
-    ChromotingHostMessage* message, Encoder::EncodingState state) {
+void SessionManager::EncodeDataAvailableTask(VideoPacket* packet) {
   DCHECK_EQ(encode_loop_, MessageLoop::current());
 
+  bool last = (packet->flags() & VideoPacket::LAST_PACKET) != 0;
+
   // Before a new encode task starts, notify clients a new update
   // stream is coming.
   // Notify this will keep a reference to the DataBuffer in the
   // task. The ownership will eventually pass to the ClientConnections.
   network_loop_->PostTask(
       FROM_HERE,
-      NewTracedMethod(this, &SessionManager::DoSendUpdate, message, state));
+      NewTracedMethod(this, &SessionManager::DoSendVideoPacket, packet));
 
-  if (state & Encoder::EncodingEnded) {
+  if (last) {
     capture_loop_->PostTask(
         FROM_HERE, NewTracedMethod(this, &SessionManager::DoFinishEncode));
   }
diff --git a/remoting/host/session_manager.h b/remoting/host/session_manager.h
index 46e02df..fedfc4f 100644
--- a/remoting/host/session_manager.h
+++ b/remoting/host/session_manager.h
@@ -14,8 +14,7 @@
 #include "base/time.h"
 #include "remoting/base/encoder.h"
 #include "remoting/host/capturer.h"
-// TODO(hclam): This class should not know the internal protobuf types.
-#include "remoting/proto/internal.pb.h"
+#include "remoting/proto/video.pb.h"
 
 namespace remoting {
 
@@ -126,8 +125,7 @@
   void DoRateControl();
 
   // DoSendUpdate takes ownership of header and is responsible for deleting it.
-  void DoSendUpdate(ChromotingHostMessage* message,
-                    Encoder::EncodingState state);
+  void DoSendVideoPacket(VideoPacket* packet);
   void DoSendInit(scoped_refptr<ClientConnection> client,
                   int width, int height);
 
@@ -141,8 +139,7 @@
 
   // EncodeDataAvailableTask takes ownership of header and is responsible for
   // deleting it.
-  void EncodeDataAvailableTask(ChromotingHostMessage* message,
-                               Encoder::EncodingState state);
+  void EncodeDataAvailableTask(VideoPacket* packet);
 
   // Message loops used by this class.
   MessageLoop* capture_loop_;
diff --git a/remoting/host/session_manager_unittest.cc b/remoting/host/session_manager_unittest.cc
index 9aa34d8a..f3870e21 100644
--- a/remoting/host/session_manager_unittest.cc
+++ b/remoting/host/session_manager_unittest.cc
@@ -19,8 +19,9 @@
 
 static const int kWidth = 640;
 static const int kHeight = 480;
-static const PixelFormat kFormat = PixelFormatRgb32;
-static const UpdateStreamEncoding kEncoding = EncodingNone;
+static const PixelFormat kFormat = PIXEL_FORMAT_RGB32;
+static const VideoPacketFormat::Encoding kEncoding =
+    VideoPacketFormat::ENCODING_VERBATIM;
 
 class SessionManagerTest : public testing::Test {
  public:
@@ -64,10 +65,7 @@
 }
 
 ACTION_P(FinishEncode, msg) {
-  Encoder::EncodingState state = (Encoder::EncodingStarting |
-                                  Encoder::EncodingInProgress |
-                                  Encoder::EncodingEnded);
-  arg2->Run(msg, state);
+  arg2->Run(msg);
   delete arg2;
 }
 
@@ -98,14 +96,12 @@
       .WillOnce(RunCallback(update_rects, data));
 
   // Expect the encoder be called.
-  ChromotingHostMessage* msg = new ChromotingHostMessage();
+  VideoPacket* packet = new VideoPacket();
   EXPECT_CALL(*encoder_, Encode(data, false, NotNull()))
-      .WillOnce(FinishEncode(msg));
+      .WillOnce(FinishEncode(packet));
 
   // Expect the client be notified.
-  EXPECT_CALL(*client_, SendBeginUpdateStreamMessage());
-  EXPECT_CALL(*client_, SendUpdateStreamPacketMessage(_));
-  EXPECT_CALL(*client_, SendEndUpdateStreamMessage());
+  EXPECT_CALL(*client_, SendVideoPacket(_));
   EXPECT_CALL(*client_, GetPendingUpdateStreamMessages())
       .Times(AtLeast(0))
       .WillRepeatedly(Return(0));
diff --git a/remoting/host/simple_host_process.cc b/remoting/host/simple_host_process.cc
index d998ef9..2733850 100644
--- a/remoting/host/simple_host_process.cc
+++ b/remoting/host/simple_host_process.cc
@@ -25,14 +25,18 @@
 #include "base/logging.h"
 #include "base/mac/scoped_nsautorelease_pool.h"
 #include "base/nss_util.h"
+#include "base/path_service.h"
 #include "base/thread.h"
+#include "media/base/media.h"
 #include "remoting/base/encoder_verbatim.h"
+#include "remoting/base/encoder_vp8.h"
 #include "remoting/base/encoder_zlib.h"
+#include "remoting/base/tracer.h"
 #include "remoting/host/capturer_fake.h"
 #include "remoting/host/chromoting_host.h"
 #include "remoting/host/chromoting_host_context.h"
 #include "remoting/host/json_host_config.h"
-#include "remoting/base/tracer.h"
+#include "remoting/proto/video.pb.h"
 
 #if defined(OS_WIN)
 #include "remoting/host/capturer_gdi.h"
@@ -62,6 +66,7 @@
 const std::string kFakeSwitchName = "fake";
 const std::string kConfigSwitchName = "config";
 const std::string kVerbatimSwitchName = "verbatim";
+const std::string kVp8SwitchName = "vp8";
 
 int main(int argc, char** argv) {
   // Needed for the Mac, so we don't leak objects when threads are created.
@@ -92,14 +97,15 @@
   // Check the argument to see if we should use a fake capturer and encoder.
   bool fake = cmd_line->HasSwitch(kFakeSwitchName);
   bool verbatim = cmd_line->HasSwitch(kVerbatimSwitchName);
+  bool vp8 = cmd_line->HasSwitch(kVp8SwitchName);
 
 #if defined(OS_WIN)
-  std::wstring path = GetEnvironmentVar(kHomeDrive);
-  path += GetEnvironmentVar(kHomePath);
+  std::wstring home_path = GetEnvironmentVar(kHomeDrive);
+  home_path += GetEnvironmentVar(kHomePath);
 #else
-  std::string path = GetEnvironmentVar(base::env_vars::kHome);
+  std::string home_path = GetEnvironmentVar(base::env_vars::kHome);
 #endif
-  FilePath config_path(path);
+  FilePath config_path(home_path);
   config_path = config_path.Append(kDefaultConfigPath);
   if (cmd_line->HasSwitch(kConfigSwitchName)) {
     config_path = cmd_line->GetSwitchValuePath(kConfigSwitchName);
@@ -116,6 +122,14 @@
     encoder.reset(new remoting::EncoderVerbatim());
   }
 
+  // TODO(sergeyu): Enable VP8 on ARM builds.
+#if !defined(ARCH_CPU_ARM_FAMILY)
+  if (vp8) {
+    LOG(INFO) << "Using the verbatim encoder.";
+    encoder.reset(new remoting::EncoderVp8());
+  }
+#endif
+
   base::Thread file_io_thread("FileIO");
   file_io_thread.Start();
 
@@ -132,6 +146,11 @@
   remoting::ChromotingHostContext context;
   context.Start();
 
+  FilePath module_path;
+  PathService::Get(base::DIR_MODULE, &module_path);
+  CHECK(media::InitializeMediaLibrary(module_path))
+      << "Cannot load media library";
+
   // Construct a chromoting host.
   scoped_refptr<remoting::ChromotingHost> host(
       new remoting::ChromotingHost(&context,
diff --git a/remoting/proto/chromotocol.gyp b/remoting/proto/chromotocol.gyp
index 3d55d21..faab3e7 100644
--- a/remoting/proto/chromotocol.gyp
+++ b/remoting/proto/chromotocol.gyp
@@ -16,6 +16,7 @@
         'control.proto',
         'event.proto',
         'internal.proto',
+        'video.proto',
       ],
       'rules': [
         {
@@ -71,6 +72,8 @@
         '<(out_dir)/event.pb.h',
         '<(out_dir)/internal.pb.cc',
         '<(out_dir)/internal.pb.h',
+        '<(out_dir)/video.pb.cc',
+        '<(out_dir)/video.pb.h',
       ],
     },
   ],
diff --git a/remoting/proto/event.proto b/remoting/proto/event.proto
index 96c8fce..2526d78 100644
--- a/remoting/proto/event.proto
+++ b/remoting/proto/event.proto
@@ -1,7 +1,7 @@
 // Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
-//
+
 // Protocol for event messages.
 
 syntax = "proto2";
@@ -10,141 +10,6 @@
 
 package remoting;
 
-// A message that gets sent to the client after the client is connected to the
-// host. It contains information that the client needs to know about the host.
-// NEXT ID: 3
-message InitClientMessage {
-  required int32 width = 1;
-  required int32 height = 2;
-}
-
-// A message to denote the beginning of an update stream. It will be followed
-// by 0 or more UpdateStreamPacketMessages and then a EndUpdateStreamMessage.
-// NEXT ID: 1
-message BeginUpdateStreamMessage {
-}
-
-// A message to denote the end of an update stream.
-// NEXT ID: 1
-message EndUpdateStreamMessage {
-}
-
-// Identifies how the image was encoded.
-enum UpdateStreamEncoding {
-  EncodingInvalid = -1;
-  EncodingNone = 0;
-  EncodingZlib = 1;
-  EncodingVp8 = 2;
-}
-
-// Identifies the pixel format.
-// Note that this list should match exactly the same as
-// media::VideoFrame::Format in media/base/video_frame.h.
-enum PixelFormat {
-  PixelFormatInvalid = 0;
-  PixelFormatRgb555 = 1;
-  PixelFormatRgb565 = 2;
-  PixelFormatRgb24 = 3;
-  PixelFormatRgb32 = 4;
-  PixelFormatRgba = 5;
-  PixelFormatYv12 = 6;
-  PixelFormatYv16 = 7;
-  PixelFormatNv12 = 8;
-  PixelFormatEmpty = 9;
-  PixelFormatAscii = 10;
-}
-
-// A message that denotes the beginning of an updating rectangle in an update
-// stream packet.
-// NEXT ID: 6
-message UpdateStreamBeginRect {
-  // X,Y coordinates (in screen pixels) for origin of this update.
-  required int32 x = 1;
-  required int32 y = 2;
-
-  // Width, height (in screen pixels) for this update.
-  required int32 width = 3;
-  required int32 height = 4;
-
-  // The encoding used for this image update.
-  optional UpdateStreamEncoding encoding = 5 [default=EncodingNone];
-
-  // The pixel format of this image.
-  optional PixelFormat pixel_format = 6 [default=PixelFormatRgb24];
-}
-
-// A message that contains partial data for updating an rectangle in an
-// update stream packet.
-// NEXT ID: 3
-message UpdateStreamRectData {
-  // The sequence number of the partial data for updating a rectangle.
-  optional int32 sequence_number = 1 [default=0];
-
-  // The partial data for updating a rectangle.
-  required bytes data = 2;
-}
-
-// A message that denotes the end of an updating rectangle.
-// NEXT ID: 1
-message UpdateStreamEndRect {
-}
-
-// A message to denote a partial update stream.
-// NEXT ID: 4
-message UpdateStreamPacketMessage {
-  optional UpdateStreamBeginRect begin_rect = 1;
-  optional UpdateStreamRectData rect_data = 2;
-  optional UpdateStreamEndRect end_rect = 3;
-}
-
-// TODO(ajwong): Determine if these fields should be optional or required.
-message RectangleFormat {
-  // X,Y coordinates (in screen pixels) for origin of this update.
-  required int32 x = 1;
-  required int32 y = 2;
-
-  // Width, height (in screen pixels) for this update.
-  required int32 width = 3;
-  required int32 height = 4;
-
-  // The encoding used for this image update.
-  optional UpdateStreamEncoding encoding = 5 [default = EncodingInvalid];
-
-  // The pixel format of this image.
-  optional PixelFormat pixel_format = 6 [default = PixelFormatRgb24];
-}
-
-message RectangleUpdatePacket {
-  // Bitmasks for use in the flags field below.
-  //
-  // The encoder may fragment one update into multiple packets depending on
-  // how the encoder outputs data.  Thus, one update can logically consist of
-  // multiple packets.  The FIRST_PACKET and LAST_PACKET flags are used to
-  // indicate the start and end of a logical update.  Here are notable
-  // consequences:
-  //  * Both FIRST_PACKET and LAST_PACKET may be set if an update is only
-  //    one packet long.
-  //  * The RectangleFormat is only supplied in a FIRST_PACKET.
-  //  * An local update cannot change format between a FIRST_PACKET and
-  //    a LAST_PACKET.
-  //  * All packets in one logical update must be processed in order, and
-  //    packets may not be skipped.
-  enum Flags {
-    FIRST_PACKET = 1;
-    LAST_PACKET = 2;
-  }
-  optional int32 flags = 1 [default = 0];
-
-  // The sequence number of the partial data for updating a rectangle.
-  optional int32 sequence_number = 2 [default = 0];
-
-  // This is provided on the first packet of the rectangle data, when
-  // the flags has FIRST_PACKET set.
-  optional RectangleFormat format = 3;
-
-  optional bytes encoded_rect = 4;
-}
-
 // Defines a keyboard event.
 // NEXT ID: 3
 message KeyEvent {
diff --git a/remoting/proto/internal.proto b/remoting/proto/internal.proto
index 2e74501..3319420 100644
--- a/remoting/proto/internal.proto
+++ b/remoting/proto/internal.proto
@@ -1,13 +1,14 @@
 // Copyright (c) 2010 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
-//
+
 // Internal messages as a unit for transmission in the wire.
 
 syntax = "proto2";
 
 import "control.proto";
 import "event.proto";
+import "video.proto";
 
 option optimize_for = LITE_RUNTIME;
 
@@ -18,10 +19,7 @@
 // NEXT ID: 5
 message ChromotingHostMessage {
   optional InitClientMessage init_client= 1;
-  optional BeginUpdateStreamMessage begin_update_stream = 2;
-  optional EndUpdateStreamMessage end_update_stream = 3;
-  optional UpdateStreamPacketMessage update_stream_packet = 4;
-  optional RectangleUpdatePacket rectangle_update = 5;
+  optional VideoPacket video_packet = 2;
 }
 
 // Defines the message that is sent from the client to the host.
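
With the begin/packet/end triplet collapsed into a single video_packet field, receive-side dispatch on ChromotingHostMessage reduces to a has_video_packet() check. A minimal sketch using only the generated protobuf-lite accessors for the fields shown above; the function name is hypothetical, and real handling lives in the protocol layer:

#include "base/logging.h"
#include "remoting/proto/internal.pb.h"
#include "remoting/proto/video.pb.h"

// Sketch only: logs what a received ChromotingHostMessage carries.
void DescribeHostMessage(const remoting::ChromotingHostMessage& message) {
  if (message.has_init_client()) {
    LOG(INFO) << "init_client: " << message.init_client().width() << "x"
              << message.init_client().height();
  } else if (message.has_video_packet()) {
    const remoting::VideoPacket& packet = message.video_packet();
    LOG(INFO) << "video_packet: " << packet.data().size() << " bytes, flags="
              << packet.flags();
  }
}
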
diff --git a/remoting/proto/video.proto b/remoting/proto/video.proto
new file mode 100644
index 0000000..c91a0a3
--- /dev/null
+++ b/remoting/proto/video.proto
@@ -0,0 +1,93 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Protocol for video messages.
+
+syntax = "proto2";
+
+option optimize_for = LITE_RUNTIME;
+
+package remoting;
+
+// A message that gets sent to the client after the client is connected to the
+// host. It contains information that the client needs to know about the host.
+// NEXT ID: 3
+// TODO(sergeyu): Move to the control channel.
+message InitClientMessage {
+  required int32 width = 1;
+  required int32 height = 2;
+}
+
+// Identifies the pixel format.
+// Note that this list should exactly match
+// media::VideoFrame::Format in media/base/video_frame.h.
+enum PixelFormat {
+  PIXEL_FORMAT_INVALID = 0;
+  PIXEL_FORMAT_RGB555 = 1;
+  PIXEL_FORMAT_RGB565 = 2;
+  PIXEL_FORMAT_RGB24 = 3;
+  PIXEL_FORMAT_RGB32 = 4;
+  PIXEL_FORMAT_RGBA = 5;
+  PIXEL_FORMAT_YV12 = 6;
+  PIXEL_FORMAT_YV16 = 7;
+  PIXEL_FORMAT_NV12 = 8;
+  PIXEL_FORMAT_EMPTY = 9;
+  PIXEL_FORMAT_ASCII = 10;
+}
+
+// TODO(ajwong): Determine if these fields should be optional or required.
+message VideoPacketFormat {
+  // Identifies how the image was encoded.
+  enum Encoding {
+    ENCODING_INVALID = -1;
+    ENCODING_VERBATIM = 0;
+    ENCODING_ZLIB = 1;
+    ENCODING_VP8 = 2;
+  };
+
+  // X,Y coordinates (in screen pixels) for origin of this update.
+  optional int32 x = 1;
+  optional int32 y = 2;
+
+  // Width, height (in screen pixels) for this update.
+  optional int32 width = 3;
+  optional int32 height = 4;
+
+  // The encoding used for this image update.
+  optional Encoding encoding = 5 [default = ENCODING_INVALID];
+
+  // The pixel format of this image.
+  optional PixelFormat pixel_format = 6 [default = PIXEL_FORMAT_RGB24];
+}
+
+message VideoPacket {
+  // Bitmasks for use in the flags field below.
+  //
+  // The encoder may fragment one update into multiple packets depending on
+  // how the encoder outputs data.  Thus, one update can logically consist of
+  // multiple packets.  The FIRST_PACKET and LAST_PACKET flags are used to
+  // indicate the start and end of a logical update.  Here are notable
+  // consequences:
+  //  * Both FIRST_PACKET and LAST_PACKET may be set if an update is only
+  //    one packet long.
+  //  * The VideoPacketFormat is only supplied in a FIRST_PACKET.
+  //  * A local update cannot change format between a FIRST_PACKET and
+  //    a LAST_PACKET.
+  //  * All packets in one logical update must be processed in order, and
+  //    packets may not be skipped.
+  enum Flags {
+    FIRST_PACKET = 1;
+    LAST_PACKET = 2;
+  }
+  optional int32 flags = 1 [default = 0];
+
+  // The sequence number of the partial data for updating a rectangle.
+  optional int32 sequence_number = 2 [default = 0];
+
+  // Provided only in the first packet of a rectangle's data, i.e. when
+  // FIRST_PACKET is set in the flags field.
+  optional VideoPacketFormat format = 3;
+
+  optional bytes data = 4;
+}
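
For orientation, here is how the generated protobuf-lite API for this file can be used to assemble a single-fragment update, where FIRST_PACKET and LAST_PACKET are both set and the format travels with the encoded bytes. This is a sketch, not code from this change; the 640x480 geometry and the zlib payload are placeholder assumptions:

#include <string>

#include "remoting/proto/video.pb.h"

// Builds a one-fragment update: both flags set, so the VideoPacketFormat is
// carried in the same packet as the encoded rectangle data.
std::string MakeSingleFragmentPacket(const std::string& encoded_rect) {
  remoting::VideoPacket packet;
  packet.set_flags(remoting::VideoPacket::FIRST_PACKET |
                   remoting::VideoPacket::LAST_PACKET);
  packet.set_sequence_number(0);

  remoting::VideoPacketFormat* format = packet.mutable_format();
  format->set_x(0);
  format->set_y(0);
  format->set_width(640);   // Placeholder geometry.
  format->set_height(480);
  format->set_encoding(remoting::VideoPacketFormat::ENCODING_ZLIB);
  format->set_pixel_format(remoting::PIXEL_FORMAT_RGB32);

  packet.set_data(encoded_rect);  // Payload produced by the encoder.

  std::string serialized;
  packet.SerializeToString(&serialized);
  return serialized;
}
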
diff --git a/remoting/protocol/host_message_dispatcher.cc b/remoting/protocol/host_message_dispatcher.cc
index 768789e..148dccf6 100644
--- a/remoting/protocol/host_message_dispatcher.cc
+++ b/remoting/protocol/host_message_dispatcher.cc
@@ -6,6 +6,7 @@
 #include "remoting/base/multiple_array_input_stream.h"
 #include "remoting/proto/control.pb.h"
 #include "remoting/proto/event.pb.h"
+#include "remoting/proto/video.pb.h"
 #include "remoting/protocol/chromotocol_connection.h"
 #include "remoting/protocol/host_message_dispatcher.h"
 #include "remoting/protocol/host_control_message_handler.h"
diff --git a/remoting/protocol/message_decoder_unittest.cc b/remoting/protocol/message_decoder_unittest.cc
index a2d4b203..90c66802 100644
--- a/remoting/protocol/message_decoder_unittest.cc
+++ b/remoting/protocol/message_decoder_unittest.cc
@@ -41,18 +41,8 @@
 
   // Then append 10 update sequences to the data.
   for (int i = 0; i < 10; ++i) {
-    msg.mutable_begin_update_stream();
-    AppendMessage(msg, &encoded_data);
-    msg.Clear();
-
-    msg.mutable_update_stream_packet()->mutable_rect_data()->
-        set_sequence_number(0);
-    msg.mutable_update_stream_packet()->mutable_rect_data()->
-        set_data(kTestData);
-    AppendMessage(msg, &encoded_data);
-    msg.Clear();
-
-    msg.mutable_end_update_stream();
+    msg.mutable_video_packet()->set_sequence_number(0);
+    msg.mutable_video_packet()->set_data(kTestData);
     AppendMessage(msg, &encoded_data);
     msg.Clear();
   }
@@ -90,30 +80,19 @@
   }
 
   // Then verify the decoded messages.
-  EXPECT_EQ(31u, message_list.size());
+  EXPECT_EQ(11u, message_list.size());
   EXPECT_TRUE(message_list.front()->has_init_client());
   delete message_list.front();
   message_list.pop_front();
 
-  int index = 0;
   for (std::list<ChromotingHostMessage*>::iterator it =
            message_list.begin();
        it != message_list.end(); ++it) {
     ChromotingHostMessage* message = *it;
-    int type = index % 3;
-    ++index;
-    if (type == 0) {
-      // Begin update stream.
-      EXPECT_TRUE(message->has_begin_update_stream());
-    } else if (type == 1) {
-      // Partial update stream.
-      EXPECT_TRUE(message->has_update_stream_packet());
-      EXPECT_EQ(kTestData,
-                message->update_stream_packet().rect_data().data());
-    } else if (type == 2) {
-      // End update stream.
-      EXPECT_TRUE(message->has_end_update_stream());
-    }
+    // Each remaining message should be a video packet carrying the test data.
+    EXPECT_TRUE(message->has_video_packet());
+    EXPECT_EQ(kTestData,
+              message->video_packet().data());
   }
   STLDeleteElements(&message_list);
 }
diff --git a/remoting/remoting.gyp b/remoting/remoting.gyp
index ec321658..8661eb3 100644
--- a/remoting/remoting.gyp
+++ b/remoting/remoting.gyp
@@ -138,8 +138,8 @@
         'base/constants.cc',
         'base/constants.h',
         'base/decoder.h',
-# BUG57374,BUG57266       'base/decoder_vp8.cc',
-# BUG57374,BUG57266       'base/decoder_vp8.h',
+        'base/decoder_vp8.cc',
+        'base/decoder_vp8.h',
         'base/decoder_row_based.cc',
         'base/decoder_row_based.h',
         'base/decompressor.h',
@@ -150,8 +150,8 @@
         'base/encoder.h',
         'base/encoder_verbatim.cc',
         'base/encoder_verbatim.h',
-# BUG57374       'base/encoder_vp8.cc',
-# BUG57374       'base/encoder_vp8.h',
+        'base/encoder_vp8.cc',
+        'base/encoder_vp8.h',
         'base/encoder_zlib.cc',
         'base/encoder_zlib.h',
         'base/multiple_array_input_stream.cc',
@@ -162,6 +162,16 @@
         'base/util.cc',
         'base/util.h',
       ],
+      'conditions': [
+        ['target_arch=="arm"', {
+          'sources!': [
+            'base/decoder_vp8.cc',
+            'base/decoder_vp8.h',
+            'base/encoder_vp8.cc',
+            'base/encoder_vp8.h',
+          ],
+        }],
+      ],
     },  # end of target 'chromoting_base'
 
     {
@@ -420,11 +430,11 @@
 # BUG57351        'base/codec_test.cc',
 # BUG57351        'base/codec_test.h',
         'base/compressor_zlib_unittest.cc',
-# BUG57374        'base/decoder_vp8_unittest.cc',
+# BUG57351        'base/decoder_vp8_unittest.cc',
         'base/decompressor_zlib_unittest.cc',
 # BUG57351        'base/encode_decode_unittest.cc',
 # BUG57351        'base/encoder_verbatim_unittest.cc',
-# BUG57374        'base/encoder_vp8_unittest.cc',
+# BUG57351        'base/encoder_vp8_unittest.cc',
 # BUG57351        'base/encoder_zlib_unittest.cc',
         'base/mock_objects.h',
         'base/multiple_array_input_stream_unittest.cc',