googleapi: remove size sniffing, send chunked multipart/mime request bodies

In the past, the Google API servers required a Content-Length header on
requests and didn't accept chunked uploads.

That appears to be fixed now, so remove a bunch of complexity around
calculating request body sizes up front, the worst of which involved
slurping the whole content into memory (which might fail and/or exhaust
all available memory).
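
For reference, a minimal standalone sketch of the streaming approach (not
the googleapi helper itself, which pairs the JSON body with the media in a
multipart/related body; the url parameter, field name and file name below
are placeholders, and multipart/form-data is used only for brevity):
handing net/http a pipe instead of a buffered body leaves
req.ContentLength at zero, so the request goes out with chunked
Transfer-Encoding and nothing is buffered to learn its size.

    package sketch

    import (
        "io"
        "mime/multipart"
        "net/http"
    )

    // uploadStreamed sends media of unknown length. Because the body is an
    // *io.PipeReader rather than a bytes/strings reader, net/http cannot
    // compute a Content-Length and falls back to chunked Transfer-Encoding.
    func uploadStreamed(client *http.Client, url string, media io.Reader) (*http.Response, error) {
        pr, pw := io.Pipe()
        mw := multipart.NewWriter(pw)
        go func() {
            // Illustrative single part; the real helper also writes the JSON body.
            part, err := mw.CreateFormFile("media", "blob")
            if err == nil {
                _, err = io.Copy(part, media)
            }
            if err == nil {
                err = mw.Close()
            }
            pw.CloseWithError(err)
        }()
        req, err := http.NewRequest("POST", url, pr)
        if err != nil {
            return nil, err
        }
        req.Header.Set("Content-Type", mw.FormDataContentType())
        return client.Do(req) // ContentLength stays 0, so the body is streamed
    }

The generated Do methods still call googleapi.ConditionallyIncludeMedia;
as the diff below shows, its contract changes from returning a content
length to returning a cancel func, and Content-Length is no longer set on
multipart requests.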

Change-Id: I8995204d604baf08237593b7a077f4dbfd5b7b11
Reviewed-on: https://code-review.googlesource.com/1870
Reviewed-by: Glenn Lewis <gmlewis@google.com>
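
At the call site nothing changes for single-chunk uploads beyond the
memory behavior. A hedged usage sketch against the regenerated
email_migration_v2 surface (the New constructor, the Mail accessor, the
user key and the file path are assumptions made for illustration):

    package sketch

    import (
        "net/http"
        "os"

        admin "google.golang.org/api/admin/email_migration_v2"
    )

    func insertMail(client *http.Client, item *admin.MailItem) error {
        svc, err := admin.New(client) // client assumed to be OAuth2-authorized
        if err != nil {
            return err
        }
        f, err := os.Open("message.eml") // placeholder path
        if err != nil {
            return err
        }
        defer f.Close()
        // Media streams the file in one multipart request; with this change
        // the body is no longer read into memory to compute its size first.
        return svc.Mail.Insert("user@example.com", item).Media(f).Do()
    }
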
diff --git a/adexchangebuyer/v1.2/adexchangebuyer-gen.go b/adexchangebuyer/v1.2/adexchangebuyer-gen.go
index 0fbe444..299500d 100644
--- a/adexchangebuyer/v1.2/adexchangebuyer-gen.go
+++ b/adexchangebuyer/v1.2/adexchangebuyer-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adexchangebuyer:v1.2"
 const apiName = "adexchangebuyer"
diff --git a/adexchangebuyer/v1.3/adexchangebuyer-gen.go b/adexchangebuyer/v1.3/adexchangebuyer-gen.go
index 04e265d..524683b 100644
--- a/adexchangebuyer/v1.3/adexchangebuyer-gen.go
+++ b/adexchangebuyer/v1.3/adexchangebuyer-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adexchangebuyer:v1.3"
 const apiName = "adexchangebuyer"
diff --git a/adexchangeseller/v1.1/adexchangeseller-api.json b/adexchangeseller/v1.1/adexchangeseller-api.json
index ab19a98..a0fe938 100644
--- a/adexchangeseller/v1.1/adexchangeseller-api.json
+++ b/adexchangeseller/v1.1/adexchangeseller-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/fcWyClHZWfYoiPcBzxEos_wQ_94\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Ob-vIbU0CMw39spLQLZEQavYp3Q\"",
  "discoveryVersion": "v1",
  "id": "adexchangeseller:v1.1",
  "name": "adexchangeseller",
  "canonicalName": "Ad Exchange Seller",
  "version": "v1.1",
- "revision": "20141222",
+ "revision": "20141112",
  "title": "Ad Exchange Seller API",
  "description": "Gives Ad Exchange seller users access to their inventory and the ability to generate reports",
  "ownerDomain": "google.com",
diff --git a/adexchangeseller/v1.1/adexchangeseller-gen.go b/adexchangeseller/v1.1/adexchangeseller-gen.go
index d9e5aa0..cb7ec8a 100644
--- a/adexchangeseller/v1.1/adexchangeseller-gen.go
+++ b/adexchangeseller/v1.1/adexchangeseller-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adexchangeseller:v1.1"
 const apiName = "adexchangeseller"
diff --git a/adexchangeseller/v1/adexchangeseller-api.json b/adexchangeseller/v1/adexchangeseller-api.json
index cf8682d..a6770b2 100644
--- a/adexchangeseller/v1/adexchangeseller-api.json
+++ b/adexchangeseller/v1/adexchangeseller-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/rndP_70v9IVpLK84zn_P-mVlnXI\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/kgKc4O1bceVftsMmf3p1OXF57gg\"",
  "discoveryVersion": "v1",
  "id": "adexchangeseller:v1",
  "name": "adexchangeseller",
  "canonicalName": "Ad Exchange Seller",
  "version": "v1",
- "revision": "20141222",
+ "revision": "20141112",
  "title": "Ad Exchange Seller API",
  "description": "Gives Ad Exchange seller users access to their inventory and the ability to generate reports",
  "ownerDomain": "google.com",
diff --git a/adexchangeseller/v1/adexchangeseller-gen.go b/adexchangeseller/v1/adexchangeseller-gen.go
index 7f59753..c8a8806 100644
--- a/adexchangeseller/v1/adexchangeseller-gen.go
+++ b/adexchangeseller/v1/adexchangeseller-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adexchangeseller:v1"
 const apiName = "adexchangeseller"
diff --git a/adexchangeseller/v2.0/adexchangeseller-api.json b/adexchangeseller/v2.0/adexchangeseller-api.json
index 6638832..df5225c 100644
--- a/adexchangeseller/v2.0/adexchangeseller-api.json
+++ b/adexchangeseller/v2.0/adexchangeseller-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/L4L12_fshxMlVyFutADZBQXnkNs\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/6aC1_omptpkys969gj83ZtJAvZ8\"",
  "discoveryVersion": "v1",
  "id": "adexchangeseller:v2.0",
  "name": "adexchangeseller",
  "canonicalName": "Ad Exchange Seller",
  "version": "v2.0",
- "revision": "20141222",
+ "revision": "20141112",
  "title": "Ad Exchange Seller API",
  "description": "Gives Ad Exchange seller users access to their inventory and the ability to generate reports",
  "ownerDomain": "google.com",
diff --git a/adexchangeseller/v2.0/adexchangeseller-gen.go b/adexchangeseller/v2.0/adexchangeseller-gen.go
index 83eef51..4098215 100644
--- a/adexchangeseller/v2.0/adexchangeseller-gen.go
+++ b/adexchangeseller/v2.0/adexchangeseller-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adexchangeseller:v2.0"
 const apiName = "adexchangeseller"
diff --git a/admin/directory_v1/admin-gen.go b/admin/directory_v1/admin-gen.go
index d76d9b6..5b1f618 100644
--- a/admin/directory_v1/admin-gen.go
+++ b/admin/directory_v1/admin-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "admin:directory_v1"
 const apiName = "admin"
diff --git a/admin/email_migration_v2/admin-gen.go b/admin/email_migration_v2/admin-gen.go
index e9687b4..03c1879 100644
--- a/admin/email_migration_v2/admin-gen.go
+++ b/admin/email_migration_v2/admin-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "admin:email_migration_v2"
 const apiName = "admin"
@@ -102,11 +104,15 @@
 // method id "emailMigration.mail.insert":
 
 type MailInsertCall struct {
-	s        *Service
-	userKey  string
-	mailitem *MailItem
-	opt_     map[string]interface{}
-	media_   io.Reader
+	s          *Service
+	userKey    string
+	mailitem   *MailItem
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Insert Mail into Google's Gmail backends
@@ -116,8 +122,32 @@
 	c.mailitem = mailitem
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *MailInsertCall) Media(r io.Reader) *MailInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *MailInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *MailInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *MailInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *MailInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -142,20 +172,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{userKey}/mail")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userKey": c.userKey,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -165,6 +217,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return err
+		}
+	}
 	return nil
 	// {
 	//   "description": "Insert Mail into Google's Gmail backends",
diff --git a/admin/reports_v1/admin-gen.go b/admin/reports_v1/admin-gen.go
index 48beaf0..a0e8b87 100644
--- a/admin/reports_v1/admin-gen.go
+++ b/admin/reports_v1/admin-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "admin:reports_v1"
 const apiName = "admin"
diff --git a/adsense/v1.2/adsense-api.json b/adsense/v1.2/adsense-api.json
index 81ebfa4..5e8ef7c 100644
--- a/adsense/v1.2/adsense-api.json
+++ b/adsense/v1.2/adsense-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/EUOYPDRuvz7lgiNaz-i4liDZMRs\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/4sbh2CScTU4USzSvCohsjZ1YVxQ\"",
  "discoveryVersion": "v1",
  "id": "adsense:v1.2",
  "name": "adsense",
  "canonicalName": "AdSense",
  "version": "v1.2",
- "revision": "20141214",
+ "revision": "20141218",
  "title": "AdSense Management API",
  "description": "Gives AdSense publishers access to their inventory and the ability to generate reports",
  "ownerDomain": "google.com",
diff --git a/adsense/v1.2/adsense-gen.go b/adsense/v1.2/adsense-gen.go
index 27385f5..d0516f2 100644
--- a/adsense/v1.2/adsense-gen.go
+++ b/adsense/v1.2/adsense-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adsense:v1.2"
 const apiName = "adsense"
diff --git a/adsense/v1.3/adsense-api.json b/adsense/v1.3/adsense-api.json
index d1ddab9..1bcbaef 100644
--- a/adsense/v1.3/adsense-api.json
+++ b/adsense/v1.3/adsense-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/VH0tIFOeX_a2Unq7tkoTCRU1X60\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/gTf3J_Tcqy5jzBteF-x9VlOxF-c\"",
  "discoveryVersion": "v1",
  "id": "adsense:v1.3",
  "name": "adsense",
  "canonicalName": "AdSense",
  "version": "v1.3",
- "revision": "20141214",
+ "revision": "20141218",
  "title": "AdSense Management API",
  "description": "Gives AdSense publishers access to their inventory and the ability to generate reports",
  "ownerDomain": "google.com",
diff --git a/adsense/v1.3/adsense-gen.go b/adsense/v1.3/adsense-gen.go
index 1a10def..f9de316 100644
--- a/adsense/v1.3/adsense-gen.go
+++ b/adsense/v1.3/adsense-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adsense:v1.3"
 const apiName = "adsense"
diff --git a/adsense/v1.4/adsense-api.json b/adsense/v1.4/adsense-api.json
index ca0ec10..f7d6f49 100644
--- a/adsense/v1.4/adsense-api.json
+++ b/adsense/v1.4/adsense-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/cSAJZw9ECrA-JzSWXpGjzWoNS_s\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/wiZb5dkU0x3zrMckXzFKcTNAp8w\"",
  "discoveryVersion": "v1",
  "id": "adsense:v1.4",
  "name": "adsense",
  "canonicalName": "AdSense",
  "version": "v1.4",
- "revision": "20141214",
+ "revision": "20141218",
  "title": "AdSense Management API",
  "description": "Gives AdSense publishers access to their inventory and the ability to generate reports",
  "ownerDomain": "google.com",
diff --git a/adsense/v1.4/adsense-gen.go b/adsense/v1.4/adsense-gen.go
index 8942f54..8e39a52 100644
--- a/adsense/v1.4/adsense-gen.go
+++ b/adsense/v1.4/adsense-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adsense:v1.4"
 const apiName = "adsense"
diff --git a/adsensehost/v4.1/adsensehost-api.json b/adsensehost/v4.1/adsensehost-api.json
index 5f28bbb..e4979b1 100644
--- a/adsensehost/v4.1/adsensehost-api.json
+++ b/adsensehost/v4.1/adsensehost-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/n73C2dBnFfcnbfxLoGnW4YmiSSM\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/yoGXlEwGPLEAr1fmq3U4dFpyoyQ\"",
  "discoveryVersion": "v1",
  "id": "adsensehost:v4.1",
  "name": "adsensehost",
  "canonicalName": "AdSense Host",
  "version": "v4.1",
- "revision": "20150105",
+ "revision": "20141229",
  "title": "AdSense Host API",
  "description": "Gives AdSense Hosts access to report generation, ad code generation, and publisher management capabilities.",
  "ownerDomain": "google.com",
diff --git a/adsensehost/v4.1/adsensehost-gen.go b/adsensehost/v4.1/adsensehost-gen.go
index 7325c06..326c156 100644
--- a/adsensehost/v4.1/adsensehost-gen.go
+++ b/adsensehost/v4.1/adsensehost-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "adsensehost:v4.1"
 const apiName = "adsensehost"
diff --git a/analytics/v2.4/analytics-api.json b/analytics/v2.4/analytics-api.json
index b90e407..384b7a3 100644
--- a/analytics/v2.4/analytics-api.json
+++ b/analytics/v2.4/analytics-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/BO6kVfqwWO5L9bXAgxK45Sf918M\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/deWKWLiFywg1_Xne7G92xyJt8HA\"",
  "discoveryVersion": "v1",
  "id": "analytics:v2.4",
  "name": "analytics",
  "version": "v2.4",
- "revision": "20141112",
+ "revision": "20150115",
  "title": "Google Analytics API",
  "description": "View and manage your Google Analytics data",
  "ownerDomain": "google.com",
diff --git a/analytics/v2.4/analytics-gen.go b/analytics/v2.4/analytics-gen.go
index 603f13f..dad52be 100644
--- a/analytics/v2.4/analytics-gen.go
+++ b/analytics/v2.4/analytics-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "analytics:v2.4"
 const apiName = "analytics"
diff --git a/analytics/v3/analytics-api.json b/analytics/v3/analytics-api.json
index b7a6c78..04a5cb3 100644
--- a/analytics/v3/analytics-api.json
+++ b/analytics/v3/analytics-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/di5CV00BXsFLkasXWC9r5t6-kNE\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/A7O3agkh7yC4B7ApdQ5Stb69Ofk\"",
  "discoveryVersion": "v1",
  "id": "analytics:v3",
  "name": "analytics",
  "version": "v3",
- "revision": "20141112",
+ "revision": "20150115",
  "title": "Google Analytics API",
  "description": "View and manage your Google Analytics data",
  "ownerDomain": "google.com",
@@ -565,6 +565,260 @@
     }
    }
   },
+  "CustomDimension": {
+   "id": "CustomDimension",
+   "type": "object",
+   "description": "JSON template for Analytics Custom Dimension.",
+   "properties": {
+    "accountId": {
+     "type": "string",
+     "description": "Account ID."
+    },
+    "active": {
+     "type": "boolean",
+     "description": "Boolean indicating whether the custom dimension is active."
+    },
+    "created": {
+     "type": "string",
+     "description": "Time the custom dimension was created.",
+     "format": "date-time",
+     "readOnly": true
+    },
+    "id": {
+     "type": "string",
+     "description": "Custom dimension ID."
+    },
+    "index": {
+     "type": "integer",
+     "description": "Index of the custom dimension.",
+     "format": "int32",
+     "readOnly": true
+    },
+    "kind": {
+     "type": "string",
+     "description": "Kind value for a custom dimension. Set to \"analytics#customDimension\". It is a read-only field.",
+     "default": "analytics#customDimension",
+     "readOnly": true
+    },
+    "name": {
+     "type": "string",
+     "description": "Name of the custom dimension."
+    },
+    "parentLink": {
+     "type": "object",
+     "description": "Parent link for the custom dimension. Points to the property to which the custom dimension belongs.",
+     "properties": {
+      "href": {
+       "type": "string",
+       "description": "Link to the property to which the custom dimension belongs."
+      },
+      "type": {
+       "type": "string",
+       "description": "Type of the parent link. Set to \"analytics#webproperty\".",
+       "default": "analytics#webproperty"
+      }
+     }
+    },
+    "scope": {
+     "type": "string",
+     "description": "Scope of the custom dimension: HIT, SESSION, USER or PRODUCT."
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "Link for the custom dimension",
+     "readOnly": true
+    },
+    "updated": {
+     "type": "string",
+     "description": "Time the custom dimension was last modified.",
+     "format": "date-time",
+     "readOnly": true
+    },
+    "webPropertyId": {
+     "type": "string",
+     "description": "Property ID."
+    }
+   }
+  },
+  "CustomDimensions": {
+   "id": "CustomDimensions",
+   "type": "object",
+   "description": "A custom dimension collection lists Analytics custom dimensions to which the user has access. Each resource in the collection corresponds to a single Analytics custom dimension.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "Collection of custom dimensions.",
+     "items": {
+      "$ref": "CustomDimension"
+     }
+    },
+    "itemsPerPage": {
+     "type": "integer",
+     "description": "The maximum number of resources the response can contain, regardless of the actual number of resources returned. Its value ranges from 1 to 1000 with a value of 1000 by default, or otherwise specified by the max-results query parameter.",
+     "format": "int32"
+    },
+    "kind": {
+     "type": "string",
+     "description": "Collection type.",
+     "default": "analytics#customDimensions"
+    },
+    "nextLink": {
+     "type": "string",
+     "description": "Link to next page for this custom dimension collection."
+    },
+    "previousLink": {
+     "type": "string",
+     "description": "Link to previous page for this custom dimension collection."
+    },
+    "startIndex": {
+     "type": "integer",
+     "description": "The starting index of the resources, which is 1 by default or otherwise specified by the start-index query parameter.",
+     "format": "int32"
+    },
+    "totalResults": {
+     "type": "integer",
+     "description": "The total number of results for the query, regardless of the number of results in the response.",
+     "format": "int32"
+    },
+    "username": {
+     "type": "string",
+     "description": "Email ID of the authenticated user"
+    }
+   }
+  },
+  "CustomMetric": {
+   "id": "CustomMetric",
+   "type": "object",
+   "description": "JSON template for Analytics Custom Metric.",
+   "properties": {
+    "accountId": {
+     "type": "string",
+     "description": "Account ID."
+    },
+    "active": {
+     "type": "boolean",
+     "description": "Boolean indicating whether the custom metric is active."
+    },
+    "created": {
+     "type": "string",
+     "description": "Time the custom metric was created.",
+     "format": "date-time",
+     "readOnly": true
+    },
+    "id": {
+     "type": "string",
+     "description": "Custom metric ID."
+    },
+    "index": {
+     "type": "integer",
+     "description": "Index of the custom metric.",
+     "format": "int32",
+     "readOnly": true
+    },
+    "kind": {
+     "type": "string",
+     "description": "Kind value for a custom metric. Set to \"analytics#customMetric\". It is a read-only field.",
+     "default": "analytics#customMetric",
+     "readOnly": true
+    },
+    "max_value": {
+     "type": "string",
+     "description": "Max value of custom metric."
+    },
+    "min_value": {
+     "type": "string",
+     "description": "Min value of custom metric."
+    },
+    "name": {
+     "type": "string",
+     "description": "Name of the custom metric."
+    },
+    "parentLink": {
+     "type": "object",
+     "description": "Parent link for the custom metric. Points to the property to which the custom metric belongs.",
+     "properties": {
+      "href": {
+       "type": "string",
+       "description": "Link to the property to which the custom metric belongs."
+      },
+      "type": {
+       "type": "string",
+       "description": "Type of the parent link. Set to \"analytics#webproperty\".",
+       "default": "analytics#webproperty"
+      }
+     }
+    },
+    "scope": {
+     "type": "string",
+     "description": "Scope of the custom metric: HIT or PRODUCT."
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "Link for the custom metric",
+     "readOnly": true
+    },
+    "type": {
+     "type": "string",
+     "description": "Data type of custom metric."
+    },
+    "updated": {
+     "type": "string",
+     "description": "Time the custom metric was last modified.",
+     "format": "date-time",
+     "readOnly": true
+    },
+    "webPropertyId": {
+     "type": "string",
+     "description": "Property ID."
+    }
+   }
+  },
+  "CustomMetrics": {
+   "id": "CustomMetrics",
+   "type": "object",
+   "description": "A custom metric collection lists Analytics custom metrics to which the user has access. Each resource in the collection corresponds to a single Analytics custom metric.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "Collection of custom metrics.",
+     "items": {
+      "$ref": "CustomMetric"
+     }
+    },
+    "itemsPerPage": {
+     "type": "integer",
+     "description": "The maximum number of resources the response can contain, regardless of the actual number of resources returned. Its value ranges from 1 to 1000 with a value of 1000 by default, or otherwise specified by the max-results query parameter.",
+     "format": "int32"
+    },
+    "kind": {
+     "type": "string",
+     "description": "Collection type.",
+     "default": "analytics#customMetrics"
+    },
+    "nextLink": {
+     "type": "string",
+     "description": "Link to next page for this custom metric collection."
+    },
+    "previousLink": {
+     "type": "string",
+     "description": "Link to previous page for this custom metric collection."
+    },
+    "startIndex": {
+     "type": "integer",
+     "description": "The starting index of the resources, which is 1 by default or otherwise specified by the start-index query parameter.",
+     "format": "int32"
+    },
+    "totalResults": {
+     "type": "integer",
+     "description": "The total number of results for the query, regardless of the number of results in the response.",
+     "format": "int32"
+    },
+    "username": {
+     "type": "string",
+     "description": "Email ID of the authenticated user"
+    }
+   }
+  },
   "DailyUpload": {
    "id": "DailyUpload",
    "type": "object",
@@ -3698,6 +3952,428 @@
       }
      }
     },
+    "customDimensions": {
+     "methods": {
+      "get": {
+       "id": "analytics.management.customDimensions.get",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}",
+       "httpMethod": "GET",
+       "description": "Get a custom dimension to which the user has access.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom dimension to retrieve.",
+         "required": true,
+         "location": "path"
+        },
+        "customDimensionId": {
+         "type": "string",
+         "description": "The ID of the custom dimension to retrieve.",
+         "required": true,
+         "location": "path"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom dimension to retrieve.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId",
+        "customDimensionId"
+       ],
+       "response": {
+        "$ref": "CustomDimension"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit",
+        "https://www.googleapis.com/auth/analytics.readonly"
+       ]
+      },
+      "insert": {
+       "id": "analytics.management.customDimensions.insert",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions",
+       "httpMethod": "POST",
+       "description": "Create a new custom dimension.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom dimension to create.",
+         "required": true,
+         "location": "path"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom dimension to create.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId"
+       ],
+       "request": {
+        "$ref": "CustomDimension"
+       },
+       "response": {
+        "$ref": "CustomDimension"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit"
+       ]
+      },
+      "list": {
+       "id": "analytics.management.customDimensions.list",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions",
+       "httpMethod": "GET",
+       "description": "Lists custom dimensions to which the user has access.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom dimensions to retrieve.",
+         "required": true,
+         "location": "path"
+        },
+        "max-results": {
+         "type": "integer",
+         "description": "The maximum number of custom dimensions to include in this response.",
+         "format": "int32",
+         "location": "query"
+        },
+        "start-index": {
+         "type": "integer",
+         "description": "An index of the first entity to retrieve. Use this parameter as a pagination mechanism along with the max-results parameter.",
+         "format": "int32",
+         "minimum": "1",
+         "location": "query"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom dimensions to retrieve.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId"
+       ],
+       "response": {
+        "$ref": "CustomDimensions"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics",
+        "https://www.googleapis.com/auth/analytics.readonly"
+       ]
+      },
+      "patch": {
+       "id": "analytics.management.customDimensions.patch",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}",
+       "httpMethod": "PATCH",
+       "description": "Updates an existing custom dimension. This method supports patch semantics.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom dimension to update.",
+         "required": true,
+         "location": "path"
+        },
+        "customDimensionId": {
+         "type": "string",
+         "description": "Custom dimension ID for the custom dimension to update.",
+         "required": true,
+         "location": "path"
+        },
+        "ignoreCustomDataSourceLinks": {
+         "type": "boolean",
+         "description": "Force the update and ignore any warnings related to the custom dimension being linked to a custom data source / data set.",
+         "default": "false",
+         "location": "query"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom dimension to update.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId",
+        "customDimensionId"
+       ],
+       "request": {
+        "$ref": "CustomDimension"
+       },
+       "response": {
+        "$ref": "CustomDimension"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit"
+       ]
+      },
+      "update": {
+       "id": "analytics.management.customDimensions.update",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}",
+       "httpMethod": "PUT",
+       "description": "Updates an existing custom dimension.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom dimension to update.",
+         "required": true,
+         "location": "path"
+        },
+        "customDimensionId": {
+         "type": "string",
+         "description": "Custom dimension ID for the custom dimension to update.",
+         "required": true,
+         "location": "path"
+        },
+        "ignoreCustomDataSourceLinks": {
+         "type": "boolean",
+         "description": "Force the update and ignore any warnings related to the custom dimension being linked to a custom data source / data set.",
+         "default": "false",
+         "location": "query"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom dimension to update.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId",
+        "customDimensionId"
+       ],
+       "request": {
+        "$ref": "CustomDimension"
+       },
+       "response": {
+        "$ref": "CustomDimension"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit"
+       ]
+      }
+     }
+    },
+    "customMetrics": {
+     "methods": {
+      "get": {
+       "id": "analytics.management.customMetrics.get",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}",
+       "httpMethod": "GET",
+       "description": "Get a custom metric to which the user has access.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom metric to retrieve.",
+         "required": true,
+         "location": "path"
+        },
+        "customMetricId": {
+         "type": "string",
+         "description": "The ID of the custom metric to retrieve.",
+         "required": true,
+         "location": "path"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom metric to retrieve.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId",
+        "customMetricId"
+       ],
+       "response": {
+        "$ref": "CustomMetric"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit",
+        "https://www.googleapis.com/auth/analytics.readonly"
+       ]
+      },
+      "insert": {
+       "id": "analytics.management.customMetrics.insert",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics",
+       "httpMethod": "POST",
+       "description": "Create a new custom metric.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom metric to create.",
+         "required": true,
+         "location": "path"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom dimension to create.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId"
+       ],
+       "request": {
+        "$ref": "CustomMetric"
+       },
+       "response": {
+        "$ref": "CustomMetric"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit"
+       ]
+      },
+      "list": {
+       "id": "analytics.management.customMetrics.list",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics",
+       "httpMethod": "GET",
+       "description": "Lists custom metrics to which the user has access.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom metrics to retrieve.",
+         "required": true,
+         "location": "path"
+        },
+        "max-results": {
+         "type": "integer",
+         "description": "The maximum number of custom metrics to include in this response.",
+         "format": "int32",
+         "location": "query"
+        },
+        "start-index": {
+         "type": "integer",
+         "description": "An index of the first entity to retrieve. Use this parameter as a pagination mechanism along with the max-results parameter.",
+         "format": "int32",
+         "minimum": "1",
+         "location": "query"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom metrics to retrieve.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId"
+       ],
+       "response": {
+        "$ref": "CustomMetrics"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics",
+        "https://www.googleapis.com/auth/analytics.readonly"
+       ]
+      },
+      "patch": {
+       "id": "analytics.management.customMetrics.patch",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}",
+       "httpMethod": "PATCH",
+       "description": "Updates an existing custom metric. This method supports patch semantics.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom metric to update.",
+         "required": true,
+         "location": "path"
+        },
+        "customMetricId": {
+         "type": "string",
+         "description": "Custom metric ID for the custom metric to update.",
+         "required": true,
+         "location": "path"
+        },
+        "ignoreCustomDataSourceLinks": {
+         "type": "boolean",
+         "description": "Force the update and ignore any warnings related to the custom metric being linked to a custom data source / data set.",
+         "default": "false",
+         "location": "query"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom metric to update.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId",
+        "customMetricId"
+       ],
+       "request": {
+        "$ref": "CustomMetric"
+       },
+       "response": {
+        "$ref": "CustomMetric"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit"
+       ]
+      },
+      "update": {
+       "id": "analytics.management.customMetrics.update",
+       "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}",
+       "httpMethod": "PUT",
+       "description": "Updates an existing custom metric.",
+       "parameters": {
+        "accountId": {
+         "type": "string",
+         "description": "Account ID for the custom metric to update.",
+         "required": true,
+         "location": "path"
+        },
+        "customMetricId": {
+         "type": "string",
+         "description": "Custom metric ID for the custom metric to update.",
+         "required": true,
+         "location": "path"
+        },
+        "ignoreCustomDataSourceLinks": {
+         "type": "boolean",
+         "description": "Force the update and ignore any warnings related to the custom metric being linked to a custom data source / data set.",
+         "default": "false",
+         "location": "query"
+        },
+        "webPropertyId": {
+         "type": "string",
+         "description": "Web property ID for the custom metric to update.",
+         "required": true,
+         "location": "path"
+        }
+       },
+       "parameterOrder": [
+        "accountId",
+        "webPropertyId",
+        "customMetricId"
+       ],
+       "request": {
+        "$ref": "CustomMetric"
+       },
+       "response": {
+        "$ref": "CustomMetric"
+       },
+       "scopes": [
+        "https://www.googleapis.com/auth/analytics.edit"
+       ]
+      }
+     }
+    },
     "dailyUploads": {
      "methods": {
       "delete": {
diff --git a/analytics/v3/analytics-gen.go b/analytics/v3/analytics-gen.go
index 5ce5b64..1a2a3ed 100644
--- a/analytics/v3/analytics-gen.go
+++ b/analytics/v3/analytics-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "analytics:v3"
 const apiName = "analytics"
@@ -137,6 +139,8 @@
 	rs.AccountUserLinks = NewManagementAccountUserLinksService(s)
 	rs.Accounts = NewManagementAccountsService(s)
 	rs.CustomDataSources = NewManagementCustomDataSourcesService(s)
+	rs.CustomDimensions = NewManagementCustomDimensionsService(s)
+	rs.CustomMetrics = NewManagementCustomMetricsService(s)
 	rs.DailyUploads = NewManagementDailyUploadsService(s)
 	rs.Experiments = NewManagementExperimentsService(s)
 	rs.Filters = NewManagementFiltersService(s)
@@ -164,6 +168,10 @@
 
 	CustomDataSources *ManagementCustomDataSourcesService
 
+	CustomDimensions *ManagementCustomDimensionsService
+
+	CustomMetrics *ManagementCustomMetricsService
+
 	DailyUploads *ManagementDailyUploadsService
 
 	Experiments *ManagementExperimentsService
@@ -227,6 +235,24 @@
 	s *Service
 }
 
+func NewManagementCustomDimensionsService(s *Service) *ManagementCustomDimensionsService {
+	rs := &ManagementCustomDimensionsService{s: s}
+	return rs
+}
+
+type ManagementCustomDimensionsService struct {
+	s *Service
+}
+
+func NewManagementCustomMetricsService(s *Service) *ManagementCustomMetricsService {
+	rs := &ManagementCustomMetricsService{s: s}
+	return rs
+}
+
+type ManagementCustomMetricsService struct {
+	s *Service
+}
+
 func NewManagementDailyUploadsService(s *Service) *ManagementDailyUploadsService {
 	rs := &ManagementDailyUploadsService{s: s}
 	return rs
@@ -673,6 +699,175 @@
 	Username string `json:"username,omitempty"`
 }
 
+type CustomDimension struct {
+	// AccountId: Account ID.
+	AccountId string `json:"accountId,omitempty"`
+
+	// Active: Boolean indicating whether the custom dimension is active.
+	Active bool `json:"active,omitempty"`
+
+	// Created: Time the custom dimension was created.
+	Created string `json:"created,omitempty"`
+
+	// Id: Custom dimension ID.
+	Id string `json:"id,omitempty"`
+
+	// Index: Index of the custom dimension.
+	Index int64 `json:"index,omitempty"`
+
+	// Kind: Kind value for a custom dimension. Set to
+	// "analytics#customDimension". It is a read-only field.
+	Kind string `json:"kind,omitempty"`
+
+	// Name: Name of the custom dimension.
+	Name string `json:"name,omitempty"`
+
+	// ParentLink: Parent link for the custom dimension. Points to the
+	// property to which the custom dimension belongs.
+	ParentLink *CustomDimensionParentLink `json:"parentLink,omitempty"`
+
+	// Scope: Scope of the custom dimension: HIT, SESSION, USER or PRODUCT.
+	Scope string `json:"scope,omitempty"`
+
+	// SelfLink: Link for the custom dimension
+	SelfLink string `json:"selfLink,omitempty"`
+
+	// Updated: Time the custom dimension was last modified.
+	Updated string `json:"updated,omitempty"`
+
+	// WebPropertyId: Property ID.
+	WebPropertyId string `json:"webPropertyId,omitempty"`
+}
+
+type CustomDimensionParentLink struct {
+	// Href: Link to the property to which the custom dimension belongs.
+	Href string `json:"href,omitempty"`
+
+	// Type: Type of the parent link. Set to "analytics#webproperty".
+	Type string `json:"type,omitempty"`
+}
+
+type CustomDimensions struct {
+	// Items: Collection of custom dimensions.
+	Items []*CustomDimension `json:"items,omitempty"`
+
+	// ItemsPerPage: The maximum number of resources the response can
+	// contain, regardless of the actual number of resources returned. Its
+	// value ranges from 1 to 1000 with a value of 1000 by default, or
+	// otherwise specified by the max-results query parameter.
+	ItemsPerPage int64 `json:"itemsPerPage,omitempty"`
+
+	// Kind: Collection type.
+	Kind string `json:"kind,omitempty"`
+
+	// NextLink: Link to next page for this custom dimension collection.
+	NextLink string `json:"nextLink,omitempty"`
+
+	// PreviousLink: Link to previous page for this custom dimension
+	// collection.
+	PreviousLink string `json:"previousLink,omitempty"`
+
+	// StartIndex: The starting index of the resources, which is 1 by
+	// default or otherwise specified by the start-index query parameter.
+	StartIndex int64 `json:"startIndex,omitempty"`
+
+	// TotalResults: The total number of results for the query, regardless
+	// of the number of results in the response.
+	TotalResults int64 `json:"totalResults,omitempty"`
+
+	// Username: Email ID of the authenticated user
+	Username string `json:"username,omitempty"`
+}
+
+type CustomMetric struct {
+	// AccountId: Account ID.
+	AccountId string `json:"accountId,omitempty"`
+
+	// Active: Boolean indicating whether the custom metric is active.
+	Active bool `json:"active,omitempty"`
+
+	// Created: Time the custom metric was created.
+	Created string `json:"created,omitempty"`
+
+	// Id: Custom metric ID.
+	Id string `json:"id,omitempty"`
+
+	// Index: Index of the custom metric.
+	Index int64 `json:"index,omitempty"`
+
+	// Kind: Kind value for a custom metric. Set to
+	// "analytics#customMetric". It is a read-only field.
+	Kind string `json:"kind,omitempty"`
+
+	// Max_value: Max value of custom metric.
+	Max_value string `json:"max_value,omitempty"`
+
+	// Min_value: Min value of custom metric.
+	Min_value string `json:"min_value,omitempty"`
+
+	// Name: Name of the custom metric.
+	Name string `json:"name,omitempty"`
+
+	// ParentLink: Parent link for the custom metric. Points to the property
+	// to which the custom metric belongs.
+	ParentLink *CustomMetricParentLink `json:"parentLink,omitempty"`
+
+	// Scope: Scope of the custom metric: HIT or PRODUCT.
+	Scope string `json:"scope,omitempty"`
+
+	// SelfLink: Link for the custom metric
+	SelfLink string `json:"selfLink,omitempty"`
+
+	// Type: Data type of custom metric.
+	Type string `json:"type,omitempty"`
+
+	// Updated: Time the custom metric was last modified.
+	Updated string `json:"updated,omitempty"`
+
+	// WebPropertyId: Property ID.
+	WebPropertyId string `json:"webPropertyId,omitempty"`
+}
+
+type CustomMetricParentLink struct {
+	// Href: Link to the property to which the custom metric belongs.
+	Href string `json:"href,omitempty"`
+
+	// Type: Type of the parent link. Set to "analytics#webproperty".
+	Type string `json:"type,omitempty"`
+}
+
+type CustomMetrics struct {
+	// Items: Collection of custom metrics.
+	Items []*CustomMetric `json:"items,omitempty"`
+
+	// ItemsPerPage: The maximum number of resources the response can
+	// contain, regardless of the actual number of resources returned. Its
+	// value ranges from 1 to 1000 with a value of 1000 by default, or
+	// otherwise specified by the max-results query parameter.
+	ItemsPerPage int64 `json:"itemsPerPage,omitempty"`
+
+	// Kind: Collection type.
+	Kind string `json:"kind,omitempty"`
+
+	// NextLink: Link to next page for this custom metric collection.
+	NextLink string `json:"nextLink,omitempty"`
+
+	// PreviousLink: Link to previous page for this custom metric
+	// collection.
+	PreviousLink string `json:"previousLink,omitempty"`
+
+	// StartIndex: The starting index of the resources, which is 1 by
+	// default or otherwise specified by the start-index query parameter.
+	StartIndex int64 `json:"startIndex,omitempty"`
+
+	// TotalResults: The total number of results for the query, regardless
+	// of the number of results in the response.
+	TotalResults int64 `json:"totalResults,omitempty"`
+
+	// Username: Email ID of the authenticated user
+	Username string `json:"username,omitempty"`
+}
+
 type DailyUpload struct {
 	// AccountId: Account ID to which this daily upload belongs.
 	AccountId string `json:"accountId,omitempty"`
@@ -3970,6 +4165,1138 @@
 
 }
 
+// method id "analytics.management.customDimensions.get":
+
+type ManagementCustomDimensionsGetCall struct {
+	s                 *Service
+	accountId         string
+	webPropertyId     string
+	customDimensionId string
+	opt_              map[string]interface{}
+}
+
+// Get: Get a custom dimension to which the user has access.
+func (r *ManagementCustomDimensionsService) Get(accountId string, webPropertyId string, customDimensionId string) *ManagementCustomDimensionsGetCall {
+	c := &ManagementCustomDimensionsGetCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.customDimensionId = customDimensionId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomDimensionsGetCall) Fields(s ...googleapi.Field) *ManagementCustomDimensionsGetCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomDimensionsGetCall) Do() (*CustomDimension, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":         c.accountId,
+		"webPropertyId":     c.webPropertyId,
+		"customDimensionId": c.customDimensionId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomDimension
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Get a custom dimension to which the user has access.",
+	//   "httpMethod": "GET",
+	//   "id": "analytics.management.customDimensions.get",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId",
+	//     "customDimensionId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom dimension to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "customDimensionId": {
+	//       "description": "The ID of the custom dimension to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom dimension to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}",
+	//   "response": {
+	//     "$ref": "CustomDimension"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit",
+	//     "https://www.googleapis.com/auth/analytics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customDimensions.insert":
+
+type ManagementCustomDimensionsInsertCall struct {
+	s               *Service
+	accountId       string
+	webPropertyId   string
+	customdimension *CustomDimension
+	opt_            map[string]interface{}
+}
+
+// Insert: Create a new custom dimension.
+func (r *ManagementCustomDimensionsService) Insert(accountId string, webPropertyId string, customdimension *CustomDimension) *ManagementCustomDimensionsInsertCall {
+	c := &ManagementCustomDimensionsInsertCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.customdimension = customdimension
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomDimensionsInsertCall) Fields(s ...googleapi.Field) *ManagementCustomDimensionsInsertCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomDimensionsInsertCall) Do() (*CustomDimension, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.customdimension)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":     c.accountId,
+		"webPropertyId": c.webPropertyId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomDimension
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Create a new custom dimension.",
+	//   "httpMethod": "POST",
+	//   "id": "analytics.management.customDimensions.insert",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom dimension to create.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom dimension to create.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions",
+	//   "request": {
+	//     "$ref": "CustomDimension"
+	//   },
+	//   "response": {
+	//     "$ref": "CustomDimension"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customDimensions.list":
+
+type ManagementCustomDimensionsListCall struct {
+	s             *Service
+	accountId     string
+	webPropertyId string
+	opt_          map[string]interface{}
+}
+
+// List: Lists custom dimensions to which the user has access.
+func (r *ManagementCustomDimensionsService) List(accountId string, webPropertyId string) *ManagementCustomDimensionsListCall {
+	c := &ManagementCustomDimensionsListCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	return c
+}
+
+// MaxResults sets the optional parameter "max-results": The maximum
+// number of custom dimensions to include in this response.
+func (c *ManagementCustomDimensionsListCall) MaxResults(maxResults int64) *ManagementCustomDimensionsListCall {
+	c.opt_["max-results"] = maxResults
+	return c
+}
+
+// StartIndex sets the optional parameter "start-index": An index of the
+// first entity to retrieve. Use this parameter as a pagination
+// mechanism along with the max-results parameter.
+func (c *ManagementCustomDimensionsListCall) StartIndex(startIndex int64) *ManagementCustomDimensionsListCall {
+	c.opt_["start-index"] = startIndex
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomDimensionsListCall) Fields(s ...googleapi.Field) *ManagementCustomDimensionsListCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomDimensionsListCall) Do() (*CustomDimensions, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["max-results"]; ok {
+		params.Set("max-results", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["start-index"]; ok {
+		params.Set("start-index", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":     c.accountId,
+		"webPropertyId": c.webPropertyId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomDimensions
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Lists custom dimensions to which the user has access.",
+	//   "httpMethod": "GET",
+	//   "id": "analytics.management.customDimensions.list",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom dimensions to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "max-results": {
+	//       "description": "The maximum number of custom dimensions to include in this response.",
+	//       "format": "int32",
+	//       "location": "query",
+	//       "type": "integer"
+	//     },
+	//     "start-index": {
+	//       "description": "An index of the first entity to retrieve. Use this parameter as a pagination mechanism along with the max-results parameter.",
+	//       "format": "int32",
+	//       "location": "query",
+	//       "minimum": "1",
+	//       "type": "integer"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom dimensions to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions",
+	//   "response": {
+	//     "$ref": "CustomDimensions"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics",
+	//     "https://www.googleapis.com/auth/analytics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customDimensions.patch":
+
+type ManagementCustomDimensionsPatchCall struct {
+	s                 *Service
+	accountId         string
+	webPropertyId     string
+	customDimensionId string
+	customdimension   *CustomDimension
+	opt_              map[string]interface{}
+}
+
+// Patch: Updates an existing custom dimension. This method supports
+// patch semantics.
+func (r *ManagementCustomDimensionsService) Patch(accountId string, webPropertyId string, customDimensionId string, customdimension *CustomDimension) *ManagementCustomDimensionsPatchCall {
+	c := &ManagementCustomDimensionsPatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.customDimensionId = customDimensionId
+	c.customdimension = customdimension
+	return c
+}
+
+// IgnoreCustomDataSourceLinks sets the optional parameter
+// "ignoreCustomDataSourceLinks": Force the update and ignore any
+// warnings related to the custom dimension being linked to a custom
+// data source / data set.
+func (c *ManagementCustomDimensionsPatchCall) IgnoreCustomDataSourceLinks(ignoreCustomDataSourceLinks bool) *ManagementCustomDimensionsPatchCall {
+	c.opt_["ignoreCustomDataSourceLinks"] = ignoreCustomDataSourceLinks
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomDimensionsPatchCall) Fields(s ...googleapi.Field) *ManagementCustomDimensionsPatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomDimensionsPatchCall) Do() (*CustomDimension, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.customdimension)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["ignoreCustomDataSourceLinks"]; ok {
+		params.Set("ignoreCustomDataSourceLinks", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PATCH", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":         c.accountId,
+		"webPropertyId":     c.webPropertyId,
+		"customDimensionId": c.customDimensionId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomDimension
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an existing custom dimension. This method supports patch semantics.",
+	//   "httpMethod": "PATCH",
+	//   "id": "analytics.management.customDimensions.patch",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId",
+	//     "customDimensionId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom dimension to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "customDimensionId": {
+	//       "description": "Custom dimension ID for the custom dimension to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "ignoreCustomDataSourceLinks": {
+	//       "default": "false",
+	//       "description": "Force the update and ignore any warnings related to the custom dimension being linked to a custom data source / data set.",
+	//       "location": "query",
+	//       "type": "boolean"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom dimension to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}",
+	//   "request": {
+	//     "$ref": "CustomDimension"
+	//   },
+	//   "response": {
+	//     "$ref": "CustomDimension"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customDimensions.update":
+
+type ManagementCustomDimensionsUpdateCall struct {
+	s                 *Service
+	accountId         string
+	webPropertyId     string
+	customDimensionId string
+	customdimension   *CustomDimension
+	opt_              map[string]interface{}
+}
+
+// Update: Updates an existing custom dimension.
+func (r *ManagementCustomDimensionsService) Update(accountId string, webPropertyId string, customDimensionId string, customdimension *CustomDimension) *ManagementCustomDimensionsUpdateCall {
+	c := &ManagementCustomDimensionsUpdateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.customDimensionId = customDimensionId
+	c.customdimension = customdimension
+	return c
+}
+
+// IgnoreCustomDataSourceLinks sets the optional parameter
+// "ignoreCustomDataSourceLinks": Force the update and ignore any
+// warnings related to the custom dimension being linked to a custom
+// data source / data set.
+func (c *ManagementCustomDimensionsUpdateCall) IgnoreCustomDataSourceLinks(ignoreCustomDataSourceLinks bool) *ManagementCustomDimensionsUpdateCall {
+	c.opt_["ignoreCustomDataSourceLinks"] = ignoreCustomDataSourceLinks
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomDimensionsUpdateCall) Fields(s ...googleapi.Field) *ManagementCustomDimensionsUpdateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomDimensionsUpdateCall) Do() (*CustomDimension, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.customdimension)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["ignoreCustomDataSourceLinks"]; ok {
+		params.Set("ignoreCustomDataSourceLinks", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PUT", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":         c.accountId,
+		"webPropertyId":     c.webPropertyId,
+		"customDimensionId": c.customDimensionId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomDimension
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an existing custom dimension.",
+	//   "httpMethod": "PUT",
+	//   "id": "analytics.management.customDimensions.update",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId",
+	//     "customDimensionId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom dimension to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "customDimensionId": {
+	//       "description": "Custom dimension ID for the custom dimension to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "ignoreCustomDataSourceLinks": {
+	//       "default": "false",
+	//       "description": "Force the update and ignore any warnings related to the custom dimension being linked to a custom data source / data set.",
+	//       "location": "query",
+	//       "type": "boolean"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom dimension to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customDimensions/{customDimensionId}",
+	//   "request": {
+	//     "$ref": "CustomDimension"
+	//   },
+	//   "response": {
+	//     "$ref": "CustomDimension"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customMetrics.get":
+
+type ManagementCustomMetricsGetCall struct {
+	s              *Service
+	accountId      string
+	webPropertyId  string
+	customMetricId string
+	opt_           map[string]interface{}
+}
+
+// Get: Get a custom metric to which the user has access.
+func (r *ManagementCustomMetricsService) Get(accountId string, webPropertyId string, customMetricId string) *ManagementCustomMetricsGetCall {
+	c := &ManagementCustomMetricsGetCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.customMetricId = customMetricId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomMetricsGetCall) Fields(s ...googleapi.Field) *ManagementCustomMetricsGetCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomMetricsGetCall) Do() (*CustomMetric, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":      c.accountId,
+		"webPropertyId":  c.webPropertyId,
+		"customMetricId": c.customMetricId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomMetric
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Get a custom metric to which the user has access.",
+	//   "httpMethod": "GET",
+	//   "id": "analytics.management.customMetrics.get",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId",
+	//     "customMetricId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom metric to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "customMetricId": {
+	//       "description": "The ID of the custom metric to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom metric to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}",
+	//   "response": {
+	//     "$ref": "CustomMetric"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit",
+	//     "https://www.googleapis.com/auth/analytics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customMetrics.insert":
+
+type ManagementCustomMetricsInsertCall struct {
+	s             *Service
+	accountId     string
+	webPropertyId string
+	custommetric  *CustomMetric
+	opt_          map[string]interface{}
+}
+
+// Insert: Create a new custom metric.
+func (r *ManagementCustomMetricsService) Insert(accountId string, webPropertyId string, custommetric *CustomMetric) *ManagementCustomMetricsInsertCall {
+	c := &ManagementCustomMetricsInsertCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.custommetric = custommetric
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomMetricsInsertCall) Fields(s ...googleapi.Field) *ManagementCustomMetricsInsertCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomMetricsInsertCall) Do() (*CustomMetric, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.custommetric)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":     c.accountId,
+		"webPropertyId": c.webPropertyId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomMetric
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Create a new custom metric.",
+	//   "httpMethod": "POST",
+	//   "id": "analytics.management.customMetrics.insert",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom metric to create.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom metric to create.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics",
+	//   "request": {
+	//     "$ref": "CustomMetric"
+	//   },
+	//   "response": {
+	//     "$ref": "CustomMetric"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customMetrics.list":
+
+type ManagementCustomMetricsListCall struct {
+	s             *Service
+	accountId     string
+	webPropertyId string
+	opt_          map[string]interface{}
+}
+
+// List: Lists custom metrics to which the user has access.
+func (r *ManagementCustomMetricsService) List(accountId string, webPropertyId string) *ManagementCustomMetricsListCall {
+	c := &ManagementCustomMetricsListCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	return c
+}
+
+// MaxResults sets the optional parameter "max-results": The maximum
+// number of custom metrics to include in this response.
+func (c *ManagementCustomMetricsListCall) MaxResults(maxResults int64) *ManagementCustomMetricsListCall {
+	c.opt_["max-results"] = maxResults
+	return c
+}
+
+// StartIndex sets the optional parameter "start-index": An index of the
+// first entity to retrieve. Use this parameter as a pagination
+// mechanism along with the max-results parameter.
+func (c *ManagementCustomMetricsListCall) StartIndex(startIndex int64) *ManagementCustomMetricsListCall {
+	c.opt_["start-index"] = startIndex
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomMetricsListCall) Fields(s ...googleapi.Field) *ManagementCustomMetricsListCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomMetricsListCall) Do() (*CustomMetrics, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["max-results"]; ok {
+		params.Set("max-results", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["start-index"]; ok {
+		params.Set("start-index", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":     c.accountId,
+		"webPropertyId": c.webPropertyId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomMetrics
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Lists custom metrics to which the user has access.",
+	//   "httpMethod": "GET",
+	//   "id": "analytics.management.customMetrics.list",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom metrics to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "max-results": {
+	//       "description": "The maximum number of custom metrics to include in this response.",
+	//       "format": "int32",
+	//       "location": "query",
+	//       "type": "integer"
+	//     },
+	//     "start-index": {
+	//       "description": "An index of the first entity to retrieve. Use this parameter as a pagination mechanism along with the max-results parameter.",
+	//       "format": "int32",
+	//       "location": "query",
+	//       "minimum": "1",
+	//       "type": "integer"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom metrics to retrieve.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics",
+	//   "response": {
+	//     "$ref": "CustomMetrics"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics",
+	//     "https://www.googleapis.com/auth/analytics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customMetrics.patch":
+
+type ManagementCustomMetricsPatchCall struct {
+	s              *Service
+	accountId      string
+	webPropertyId  string
+	customMetricId string
+	custommetric   *CustomMetric
+	opt_           map[string]interface{}
+}
+
+// Patch: Updates an existing custom metric. This method supports patch
+// semantics.
+func (r *ManagementCustomMetricsService) Patch(accountId string, webPropertyId string, customMetricId string, custommetric *CustomMetric) *ManagementCustomMetricsPatchCall {
+	c := &ManagementCustomMetricsPatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.customMetricId = customMetricId
+	c.custommetric = custommetric
+	return c
+}
+
+// IgnoreCustomDataSourceLinks sets the optional parameter
+// "ignoreCustomDataSourceLinks": Force the update and ignore any
+// warnings related to the custom metric being linked to a custom data
+// source / data set.
+func (c *ManagementCustomMetricsPatchCall) IgnoreCustomDataSourceLinks(ignoreCustomDataSourceLinks bool) *ManagementCustomMetricsPatchCall {
+	c.opt_["ignoreCustomDataSourceLinks"] = ignoreCustomDataSourceLinks
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomMetricsPatchCall) Fields(s ...googleapi.Field) *ManagementCustomMetricsPatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomMetricsPatchCall) Do() (*CustomMetric, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.custommetric)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["ignoreCustomDataSourceLinks"]; ok {
+		params.Set("ignoreCustomDataSourceLinks", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PATCH", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":      c.accountId,
+		"webPropertyId":  c.webPropertyId,
+		"customMetricId": c.customMetricId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomMetric
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an existing custom metric. This method supports patch semantics.",
+	//   "httpMethod": "PATCH",
+	//   "id": "analytics.management.customMetrics.patch",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId",
+	//     "customMetricId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom metric to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "customMetricId": {
+	//       "description": "Custom metric ID for the custom metric to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "ignoreCustomDataSourceLinks": {
+	//       "default": "false",
+	//       "description": "Force the update and ignore any warnings related to the custom metric being linked to a custom data source / data set.",
+	//       "location": "query",
+	//       "type": "boolean"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom metric to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}",
+	//   "request": {
+	//     "$ref": "CustomMetric"
+	//   },
+	//   "response": {
+	//     "$ref": "CustomMetric"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit"
+	//   ]
+	// }
+
+}
+
+// method id "analytics.management.customMetrics.update":
+
+type ManagementCustomMetricsUpdateCall struct {
+	s              *Service
+	accountId      string
+	webPropertyId  string
+	customMetricId string
+	custommetric   *CustomMetric
+	opt_           map[string]interface{}
+}
+
+// Update: Updates an existing custom metric.
+func (r *ManagementCustomMetricsService) Update(accountId string, webPropertyId string, customMetricId string, custommetric *CustomMetric) *ManagementCustomMetricsUpdateCall {
+	c := &ManagementCustomMetricsUpdateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountId = accountId
+	c.webPropertyId = webPropertyId
+	c.customMetricId = customMetricId
+	c.custommetric = custommetric
+	return c
+}
+
+// IgnoreCustomDataSourceLinks sets the optional parameter
+// "ignoreCustomDataSourceLinks": Force the update and ignore any
+// warnings related to the custom metric being linked to a custom data
+// source / data set.
+func (c *ManagementCustomMetricsUpdateCall) IgnoreCustomDataSourceLinks(ignoreCustomDataSourceLinks bool) *ManagementCustomMetricsUpdateCall {
+	c.opt_["ignoreCustomDataSourceLinks"] = ignoreCustomDataSourceLinks
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *ManagementCustomMetricsUpdateCall) Fields(s ...googleapi.Field) *ManagementCustomMetricsUpdateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *ManagementCustomMetricsUpdateCall) Do() (*CustomMetric, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.custommetric)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["ignoreCustomDataSourceLinks"]; ok {
+		params.Set("ignoreCustomDataSourceLinks", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PUT", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"accountId":      c.accountId,
+		"webPropertyId":  c.webPropertyId,
+		"customMetricId": c.customMetricId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *CustomMetric
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an existing custom metric.",
+	//   "httpMethod": "PUT",
+	//   "id": "analytics.management.customMetrics.update",
+	//   "parameterOrder": [
+	//     "accountId",
+	//     "webPropertyId",
+	//     "customMetricId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "Account ID for the custom metric to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "customMetricId": {
+	//       "description": "Custom metric ID for the custom metric to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "ignoreCustomDataSourceLinks": {
+	//       "default": "false",
+	//       "description": "Force the update and ignore any warnings related to the custom metric being linked to a custom data source / data set.",
+	//       "location": "query",
+	//       "type": "boolean"
+	//     },
+	//     "webPropertyId": {
+	//       "description": "Web property ID for the custom metric to update.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "management/accounts/{accountId}/webproperties/{webPropertyId}/customMetrics/{customMetricId}",
+	//   "request": {
+	//     "$ref": "CustomMetric"
+	//   },
+	//   "response": {
+	//     "$ref": "CustomMetric"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/analytics.edit"
+	//   ]
+	// }
+
+}
+
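For reference, a minimal sketch of how a caller might drive the generated custom-dimension builders added above. It is illustrative only, not part of the generated output: the import path, the authorized HTTP client, the IDs, and the result field names (Items, Id, Name) are assumptions.

package analyticsexample

import (
	"fmt"
	"log"
	"net/http"

	analytics "google.golang.org/api/analytics/v3" // assumed import path for this generated package
)

// listCustomDimensions pages through custom dimensions using the optional
// max-results / start-index parameters defined by the List call above.
func listCustomDimensions(authClient *http.Client) {
	svc, err := analytics.New(authClient) // authClient must already carry an analytics scope
	if err != nil {
		log.Fatal(err)
	}
	dims, err := svc.Management.CustomDimensions.
		List("12345678", "UA-12345678-1"). // hypothetical accountId and webPropertyId
		MaxResults(20).
		StartIndex(1).
		Fields("items(id,name,index)"). // partial response via the Fields option
		Do()
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range dims.Items {
		fmt.Println(d.Id, d.Name)
	}
}

Insert, Patch and Update follow the same fluent pattern, taking a *CustomDimension (or *CustomMetric) value instead of pagination options.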
 // method id "analytics.management.dailyUploads.delete":
 
 type ManagementDailyUploadsDeleteCall struct {
@@ -4258,6 +5585,10 @@
 	type_              string
 	opt_               map[string]interface{}
 	media_             io.Reader
+	resumable_         googleapi.SizeReaderAt
+	mediaType_         string
+	ctx_               context.Context
+	protocol_          string
 }
 
 // Upload: Update/Overwrite data for a custom data source.
@@ -4279,8 +5610,32 @@
 	c.opt_["reset"] = reset
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ManagementDailyUploadsUploadCall) Media(r io.Reader) *ManagementDailyUploadsUploadCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ManagementDailyUploadsUploadCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ManagementDailyUploadsUploadCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ManagementDailyUploadsUploadCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ManagementDailyUploadsUploadCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -4305,14 +5660,27 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customDataSources/{customDataSourceId}/dailyUploads/{date}/uploads")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"accountId":          c.accountId,
@@ -4320,10 +5688,19 @@
 		"customDataSourceId": c.customDataSourceId,
 		"date":               c.date,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -4333,6 +5710,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *DailyUploadAppend
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -8950,6 +10342,10 @@
 	customDataSourceId string
 	opt_               map[string]interface{}
 	media_             io.Reader
+	resumable_         googleapi.SizeReaderAt
+	mediaType_         string
+	ctx_               context.Context
+	protocol_          string
 }
 
 // UploadData: Upload data for a custom data source.
@@ -8960,8 +10356,32 @@
 	c.customDataSourceId = customDataSourceId
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ManagementUploadsUploadDataCall) Media(r io.Reader) *ManagementUploadsUploadDataCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ManagementUploadsUploadDataCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ManagementUploadsUploadDataCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ManagementUploadsUploadDataCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ManagementUploadsUploadDataCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -8981,24 +10401,46 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "management/accounts/{accountId}/webproperties/{webPropertyId}/customDataSources/{customDataSourceId}/uploads")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"accountId":          c.accountId,
 		"webPropertyId":      c.webPropertyId,
 		"customDataSourceId": c.customDataSourceId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -9008,6 +10450,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Upload
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
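To illustrate the upload path rewritten above, a hedged sketch of uploading custom data source hits; the import path, the IDs and the Upload result fields are assumptions. The effect of this change is visible here: Media streams the reader as one multipart part, so no Content-Length is computed and the reader may be unbounded (for example a pipe).

package analyticsexample

import (
	"io"
	"log"

	analytics "google.golang.org/api/analytics/v3" // assumed import path
)

func uploadHits(svc *analytics.Service, csv io.Reader) error {
	up, err := svc.Management.Uploads.
		UploadData("12345678", "UA-12345678-1", "abcdefgh"). // hypothetical accountId, webPropertyId, customDataSourceId
		Media(csv).                                          // sent as a chunked multipart body; the size is never sniffed
		Do()
	if err != nil {
		return err
	}
	log.Printf("upload %s status %s", up.Id, up.Status)
	return nil
}

The chunked alternative is ResumableMedia(ctx, readerAt, size, mediaType) combined with ProgressUpdater(func(current, total int64) { ... }), though note that the generated Do above rejects a resumable upload unless a "name" query parameter is set, which these analytics methods do not define.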
diff --git a/androidpublisher/v1.1/androidpublisher-gen.go b/androidpublisher/v1.1/androidpublisher-gen.go
index 245279c..2ddcec8 100644
--- a/androidpublisher/v1.1/androidpublisher-gen.go
+++ b/androidpublisher/v1.1/androidpublisher-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "androidpublisher:v1.1"
 const apiName = "androidpublisher"
diff --git a/androidpublisher/v1/androidpublisher-gen.go b/androidpublisher/v1/androidpublisher-gen.go
index 7b01dce..b25d601 100644
--- a/androidpublisher/v1/androidpublisher-gen.go
+++ b/androidpublisher/v1/androidpublisher-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "androidpublisher:v1"
 const apiName = "androidpublisher"
diff --git a/androidpublisher/v2/androidpublisher-gen.go b/androidpublisher/v2/androidpublisher-gen.go
index 847bb99..e4e1124 100644
--- a/androidpublisher/v2/androidpublisher-gen.go
+++ b/androidpublisher/v2/androidpublisher-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "androidpublisher:v2"
 const apiName = "androidpublisher"
@@ -1744,6 +1746,10 @@
 	editId        string
 	opt_          map[string]interface{}
 	media_        io.Reader
+	resumable_    googleapi.SizeReaderAt
+	mediaType_    string
+	ctx_          context.Context
+	protocol_     string
 }
 
 // Upload:
@@ -1753,8 +1759,32 @@
 	c.editId = editId
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *EditsApksUploadCall) Media(r io.Reader) *EditsApksUploadCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *EditsApksUploadCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *EditsApksUploadCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *EditsApksUploadCall) ProgressUpdater(pu googleapi.ProgressUpdater) *EditsApksUploadCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -1774,23 +1804,45 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{packageName}/edits/{editId}/apks")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"packageName": c.packageNameid,
 		"editId":      c.editId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -1800,6 +1852,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Apk
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -2515,6 +2582,10 @@
 	expansionFileType string
 	opt_              map[string]interface{}
 	media_            io.Reader
+	resumable_        googleapi.SizeReaderAt
+	mediaType_        string
+	ctx_              context.Context
+	protocol_         string
 }
 
 // Upload: Uploads and attaches a new Expansion File to the APK
@@ -2527,8 +2598,32 @@
 	c.expansionFileType = expansionFileType
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *EditsExpansionfilesUploadCall) Media(r io.Reader) *EditsExpansionfilesUploadCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *EditsExpansionfilesUploadCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *EditsExpansionfilesUploadCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *EditsExpansionfilesUploadCall) ProgressUpdater(pu googleapi.ProgressUpdater) *EditsExpansionfilesUploadCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2548,14 +2643,27 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{packageName}/edits/{editId}/apks/{apkVersionCode}/expansionFiles/{expansionFileType}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"packageName":       c.packageNameid,
@@ -2563,10 +2671,19 @@
 		"apkVersionCode":    strconv.FormatInt(c.apkVersionCode, 10),
 		"expansionFileType": c.expansionFileType,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2576,6 +2693,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *ExpansionFilesUploadResponse
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -3042,6 +3174,10 @@
 	imageType     string
 	opt_          map[string]interface{}
 	media_        io.Reader
+	resumable_    googleapi.SizeReaderAt
+	mediaType_    string
+	ctx_          context.Context
+	protocol_     string
 }
 
 // Upload: Uploads a new image and adds it to the list of images for the
@@ -3054,8 +3190,32 @@
 	c.imageType = imageType
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *EditsImagesUploadCall) Media(r io.Reader) *EditsImagesUploadCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *EditsImagesUploadCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *EditsImagesUploadCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *EditsImagesUploadCall) ProgressUpdater(pu googleapi.ProgressUpdater) *EditsImagesUploadCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -3075,14 +3235,27 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{packageName}/edits/{editId}/listings/{language}/{imageType}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"packageName": c.packageNameid,
@@ -3090,10 +3263,19 @@
 		"language":    c.language,
 		"imageType":   c.imageType,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -3103,6 +3285,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *ImagesUploadResponse
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
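The androidpublisher upload calls gain the same Media/ResumableMedia surface. As an illustration only (the import path, the service accessors and the Apk.VersionCode field are assumptions), uploading an APK through the single-chunk path might look like:

package publisherexample

import (
	"log"
	"os"

	androidpublisher "google.golang.org/api/androidpublisher/v2" // assumed import path
)

func uploadApk(svc *androidpublisher.Service, editID string, apk *os.File) error {
	res, err := svc.Edits.Apks.
		Upload("com.example.app", editID). // hypothetical package name; editID comes from a prior edit insert
		Media(apk).                        // streamed as multipart, no client-side size calculation
		Do()
	if err != nil {
		return err
	}
	log.Printf("uploaded APK with versionCode %d", res.VersionCode)
	return nil
}

Expansion files and listing images follow the same pattern via the corresponding Expansionfiles and Images upload calls shown above.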
diff --git a/appsactivity/v1/appsactivity-gen.go b/appsactivity/v1/appsactivity-gen.go
index ff7be6f..79e98a9 100644
--- a/appsactivity/v1/appsactivity-gen.go
+++ b/appsactivity/v1/appsactivity-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "appsactivity:v1"
 const apiName = "appsactivity"
diff --git a/appstate/v1/appstate-api.json b/appstate/v1/appstate-api.json
index ba7061d..f46cac7 100644
--- a/appstate/v1/appstate-api.json
+++ b/appstate/v1/appstate-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/JUJgalHvHhfQ2v-jfQiWrgEZD70\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/fq2tcHmUUpw0BqgyQT2jiNE1XgU\"",
  "discoveryVersion": "v1",
  "id": "appstate:v1",
  "name": "appstate",
  "canonicalName": "App State",
  "version": "v1",
- "revision": "20150107",
+ "revision": "20150119",
  "title": "Google App State API",
  "description": "The Google App State API.",
  "ownerDomain": "google.com",
diff --git a/appstate/v1/appstate-gen.go b/appstate/v1/appstate-gen.go
index 68c1fa4..bc8fb22 100644
--- a/appstate/v1/appstate-gen.go
+++ b/appstate/v1/appstate-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "appstate:v1"
 const apiName = "appstate"
diff --git a/audit/v1/audit-gen.go b/audit/v1/audit-gen.go
index 15c424e..0be8538 100644
--- a/audit/v1/audit-gen.go
+++ b/audit/v1/audit-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "audit:v1"
 const apiName = "audit"
diff --git a/autoscaler/v1beta2/autoscaler-gen.go b/autoscaler/v1beta2/autoscaler-gen.go
index 95b42e6..4886bc8 100644
--- a/autoscaler/v1beta2/autoscaler-gen.go
+++ b/autoscaler/v1beta2/autoscaler-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "autoscaler:v1beta2"
 const apiName = "autoscaler"
diff --git a/bigquery/v2/bigquery-gen.go b/bigquery/v2/bigquery-gen.go
index 515c687..20b8b80 100644
--- a/bigquery/v2/bigquery-gen.go
+++ b/bigquery/v2/bigquery-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "bigquery:v2"
 const apiName = "bigquery"
@@ -1952,11 +1954,15 @@
 // method id "bigquery.jobs.insert":
 
 type JobsInsertCall struct {
-	s         *Service
-	projectId string
-	job       *Job
-	opt_      map[string]interface{}
-	media_    io.Reader
+	s          *Service
+	projectId  string
+	job        *Job
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Starts a new asynchronous job.
@@ -1966,8 +1972,32 @@
 	c.job = job
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *JobsInsertCall) Media(r io.Reader) *JobsInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks; the upload may be canceled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *JobsInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *JobsInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *JobsInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *JobsInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -1992,20 +2022,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/jobs")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"projectId": c.projectId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2015,6 +2067,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Job
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
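
For illustration, a minimal usage sketch of the media-upload surface generated above, assuming an OAuth2-authorized *http.Client; the project ID, file name, and load-job configuration are placeholders, and only Media, ResumableMedia, ProgressUpdater, and Do come from the generated code.

package main

import (
	"log"
	"net/http"
	"os"

	bigquery "google.golang.org/api/bigquery/v2"
)

func main() {
	// Placeholder client: a real program would use an OAuth2-authorized client.
	client := http.DefaultClient

	svc, err := bigquery.New(client)
	if err != nil {
		log.Fatal(err)
	}

	f, err := os.Open("rows.csv") // placeholder data file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Load-job configuration elided; only the upload path matters here.
	job := &bigquery.Job{}

	// Media sends the payload as a chunked multipart/mime body (uploadType=multipart),
	// so the library no longer buffers the reader just to learn its size.
	inserted, err := svc.Jobs.Insert("my-project", job).Media(f).Do()
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("started job %s", inserted.Id)

	// The resumable variant added above would instead look like, e.g.:
	//   svc.Jobs.Insert("my-project", job).
	//       ResumableMedia(ctx, f, size, "text/csv").
	//       ProgressUpdater(func(current, total int64) { log.Printf("%d/%d", current, total) }).
	//       Do()
}
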
diff --git a/blogger/v2/blogger-gen.go b/blogger/v2/blogger-gen.go
index fb2eb55..f76c085 100644
--- a/blogger/v2/blogger-gen.go
+++ b/blogger/v2/blogger-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "blogger:v2"
 const apiName = "blogger"
diff --git a/blogger/v3/blogger-gen.go b/blogger/v3/blogger-gen.go
index 6e78ad8..d03fad0 100644
--- a/blogger/v3/blogger-gen.go
+++ b/blogger/v3/blogger-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "blogger:v3"
 const apiName = "blogger"
diff --git a/books/v1/books-api.json b/books/v1/books-api.json
index f813fcb..a424027 100644
--- a/books/v1/books-api.json
+++ b/books/v1/books-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/c7w4Ctd4BI5F_nfpCMLtv6FlZkE\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/WFak0mMx4MV3LiYt53aok2RzgnY\"",
  "discoveryVersion": "v1",
  "id": "books:v1",
  "name": "books",
  "version": "v1",
- "revision": "20141216",
+ "revision": "20150109",
  "title": "Books API",
  "description": "Lets you search for books and manage your Google Books library.",
  "ownerDomain": "google.com",
@@ -1082,6 +1082,9 @@
        "artUrl": {
         "type": "string"
        },
+       "gservicesKey": {
+        "type": "string"
+       },
        "id": {
         "type": "string"
        },
diff --git a/books/v1/books-gen.go b/books/v1/books-gen.go
index 9a9eb06..1662780 100644
--- a/books/v1/books-gen.go
+++ b/books/v1/books-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "books:v1"
 const apiName = "books"
@@ -972,6 +974,8 @@
 type OffersItems struct {
 	ArtUrl string `json:"artUrl,omitempty"`
 
+	GservicesKey string `json:"gservicesKey,omitempty"`
+
 	Id string `json:"id,omitempty"`
 
 	Items []*OffersItemsItems `json:"items,omitempty"`
diff --git a/calendar/v3/calendar-api.json b/calendar/v3/calendar-api.json
index dbb2456..ffdc467 100644
--- a/calendar/v3/calendar-api.json
+++ b/calendar/v3/calendar-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Oih410CQXyK1bm6hFIQSVxW_CZo\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/IMFldb9KK1xhvvmWpAqvvLL8wN4\"",
  "discoveryVersion": "v1",
  "id": "calendar:v3",
  "name": "calendar",
  "version": "v3",
- "revision": "20141223",
+ "revision": "20141214",
  "title": "Calendar API",
  "description": "Lets you manipulate events and other calendar data.",
  "ownerDomain": "google.com",
diff --git a/calendar/v3/calendar-gen.go b/calendar/v3/calendar-gen.go
index d0ee2ed..5da8f35 100644
--- a/calendar/v3/calendar-gen.go
+++ b/calendar/v3/calendar-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "calendar:v3"
 const apiName = "calendar"
diff --git a/civicinfo/us_v1/civicinfo-gen.go b/civicinfo/us_v1/civicinfo-gen.go
index c8c1587..c029d86 100644
--- a/civicinfo/us_v1/civicinfo-gen.go
+++ b/civicinfo/us_v1/civicinfo-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "civicinfo:us_v1"
 const apiName = "civicinfo"
diff --git a/civicinfo/v1/civicinfo-gen.go b/civicinfo/v1/civicinfo-gen.go
index 08b9632..b9a3f27 100644
--- a/civicinfo/v1/civicinfo-gen.go
+++ b/civicinfo/v1/civicinfo-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "civicinfo:v1"
 const apiName = "civicinfo"
diff --git a/civicinfo/v2/civicinfo-gen.go b/civicinfo/v2/civicinfo-gen.go
index bc1acbc..6950330 100644
--- a/civicinfo/v2/civicinfo-gen.go
+++ b/civicinfo/v2/civicinfo-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "civicinfo:v2"
 const apiName = "civicinfo"
diff --git a/cloudlatencytest/v2/cloudlatencytest-gen.go b/cloudlatencytest/v2/cloudlatencytest-gen.go
index a080e54..4003115 100644
--- a/cloudlatencytest/v2/cloudlatencytest-gen.go
+++ b/cloudlatencytest/v2/cloudlatencytest-gen.go
@@ -12,6 +12,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -31,6 +32,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "cloudlatencytest:v2"
 const apiName = "cloudlatencytest"
diff --git a/cloudmonitoring/v2beta1/cloudmonitoring-api.json b/cloudmonitoring/v2beta1/cloudmonitoring-api.json
index 6a368a8..8b87666 100644
--- a/cloudmonitoring/v2beta1/cloudmonitoring-api.json
+++ b/cloudmonitoring/v2beta1/cloudmonitoring-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/YRzPcCfM8n4JDcuNuAKt7HespQc\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/_P21uQ2GK5pRjA-Z7Vcc84OAsKE\"",
  "discoveryVersion": "v1",
  "id": "cloudmonitoring:v2beta1",
  "name": "cloudmonitoring",
  "canonicalName": "Cloud Monitoring",
  "version": "v2beta1",
- "revision": "20150109",
+ "revision": "20141112",
  "title": "Cloud Monitoring API",
  "description": "API for accessing Google Cloud and API monitoring data.",
  "ownerDomain": "google.com",
@@ -493,6 +493,23 @@
      "httpMethod": "GET",
      "description": "List the data points of the time series that match the metric and labels values and that have data points in the interval. Large responses are paginated; use the nextPageToken returned in the response to request subsequent pages of results by setting the pageToken query parameter to the value of the nextPageToken.",
      "parameters": {
+      "aggregator": {
+       "type": "string",
+       "description": "The aggregation function that will reduce the data points in each window to a single point. This parameter is only valid for non-cumulative metric types.",
+       "enum": [
+        "max",
+        "mean",
+        "min",
+        "sum"
+       ],
+       "enumDescriptions": [
+        "",
+        "",
+        "",
+        ""
+       ],
+       "location": "query"
+      },
       "count": {
        "type": "integer",
        "description": "Maximum number of data points per page, which is used for pagination of results.",
@@ -537,6 +554,12 @@
        "pattern": "[0-9]+[smhdw]?",
        "location": "query"
       },
+      "window": {
+       "type": "string",
+       "description": "The sampling window. At most one data point will be returned for each window in the requested time interval. This parameter is only valid for non-cumulative metric types. Units:  \n- m: minute \n- h: hour \n- d: day \n- w: week  Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is not allowed; you should use 17d instead.",
+       "pattern": "[0-9]+[mhdw]?",
+       "location": "query"
+      },
       "youngest": {
        "type": "string",
        "description": "End of the time interval (inclusive), which is expressed as an RFC 3339 timestamp.",
@@ -569,6 +592,23 @@
      "httpMethod": "GET",
      "description": "List the descriptors of the time series that match the metric and labels values and that have data points in the interval. Large responses are paginated; use the nextPageToken returned in the response to request subsequent pages of results by setting the pageToken query parameter to the value of the nextPageToken.",
      "parameters": {
+      "aggregator": {
+       "type": "string",
+       "description": "The aggregation function that will reduce the data points in each window to a single point. This parameter is only valid for non-cumulative metric types.",
+       "enum": [
+        "max",
+        "mean",
+        "min",
+        "sum"
+       ],
+       "enumDescriptions": [
+        "",
+        "",
+        "",
+        ""
+       ],
+       "location": "query"
+      },
       "count": {
        "type": "integer",
        "description": "Maximum number of time series descriptors per page. Used for pagination. If not specified, count = 100.",
@@ -613,6 +653,12 @@
        "pattern": "[0-9]+[smhdw]?",
        "location": "query"
       },
+      "window": {
+       "type": "string",
+       "description": "The sampling window. At most one data point will be returned for each window in the requested time interval. This parameter is only valid for non-cumulative metric types. Units:  \n- m: minute \n- h: hour \n- d: day \n- w: week  Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is not allowed; you should use 17d instead.",
+       "pattern": "[0-9]+[mhdw]?",
+       "location": "query"
+      },
       "youngest": {
        "type": "string",
        "description": "End of the time interval (inclusive), which is expressed as an RFC 3339 timestamp.",
diff --git a/cloudmonitoring/v2beta1/cloudmonitoring-gen.go b/cloudmonitoring/v2beta1/cloudmonitoring-gen.go
index 47496ff..3cc8b76 100644
--- a/cloudmonitoring/v2beta1/cloudmonitoring-gen.go
+++ b/cloudmonitoring/v2beta1/cloudmonitoring-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "cloudmonitoring:v2beta1"
 const apiName = "cloudmonitoring"
@@ -485,6 +487,14 @@
 	return c
 }
 
+// Aggregator sets the optional parameter "aggregator": The aggregation
+// function that will reduce the data points in each window to a single
+// point. This parameter is only valid for non-cumulative metric types.
+func (c *TimeseriesListCall) Aggregator(aggregator string) *TimeseriesListCall {
+	c.opt_["aggregator"] = aggregator
+	return c
+}
+
 // Count sets the optional parameter "count": Maximum number of data
 // points per page, which is used for pagination of results.
 func (c *TimeseriesListCall) Count(count int64) *TimeseriesListCall {
@@ -547,6 +557,21 @@
 	return c
 }
 
+// Window sets the optional parameter "window": The sampling window. At
+// most one data point will be returned for each window in the requested
+// time interval. This parameter is only valid for non-cumulative metric
+// types. Units:
+// - m: minute
+// - h: hour
+// - d: day
+// - w: week
+// Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is not
+// allowed; you should use 17d instead.
+func (c *TimeseriesListCall) Window(window string) *TimeseriesListCall {
+	c.opt_["window"] = window
+	return c
+}
+
 // Fields allows partial responses to be retrieved.
 // See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
 // for more information.
@@ -560,6 +585,9 @@
 	params := make(url.Values)
 	params.Set("alt", "json")
 	params.Set("youngest", fmt.Sprintf("%v", c.youngest))
+	if v, ok := c.opt_["aggregator"]; ok {
+		params.Set("aggregator", fmt.Sprintf("%v", v))
+	}
 	if v, ok := c.opt_["count"]; ok {
 		params.Set("count", fmt.Sprintf("%v", v))
 	}
@@ -575,6 +603,9 @@
 	if v, ok := c.opt_["timespan"]; ok {
 		params.Set("timespan", fmt.Sprintf("%v", v))
 	}
+	if v, ok := c.opt_["window"]; ok {
+		params.Set("window", fmt.Sprintf("%v", v))
+	}
 	if v, ok := c.opt_["fields"]; ok {
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
@@ -609,6 +640,23 @@
 	//     "youngest"
 	//   ],
 	//   "parameters": {
+	//     "aggregator": {
+	//       "description": "The aggregation function that will reduce the data points in each window to a single point. This parameter is only valid for non-cumulative metric types.",
+	//       "enum": [
+	//         "max",
+	//         "mean",
+	//         "min",
+	//         "sum"
+	//       ],
+	//       "enumDescriptions": [
+	//         "",
+	//         "",
+	//         "",
+	//         ""
+	//       ],
+	//       "location": "query",
+	//       "type": "string"
+	//     },
 	//     "count": {
 	//       "default": "6000",
 	//       "description": "Maximum number of data points per page, which is used for pagination of results.",
@@ -653,6 +701,12 @@
 	//       "pattern": "[0-9]+[smhdw]?",
 	//       "type": "string"
 	//     },
+	//     "window": {
+	//       "description": "The sampling window. At most one data point will be returned for each window in the requested time interval. This parameter is only valid for non-cumulative metric types. Units:  \n- m: minute \n- h: hour \n- d: day \n- w: week  Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is not allowed; you should use 17d instead.",
+	//       "location": "query",
+	//       "pattern": "[0-9]+[mhdw]?",
+	//       "type": "string"
+	//     },
 	//     "youngest": {
 	//       "description": "End of the time interval (inclusive), which is expressed as an RFC 3339 timestamp.",
 	//       "location": "query",
@@ -699,6 +753,14 @@
 	return c
 }
 
+// Aggregator sets the optional parameter "aggregator": The aggregation
+// function that will reduce the data points in each window to a single
+// point. This parameter is only valid for non-cumulative metric types.
+func (c *TimeseriesDescriptorsListCall) Aggregator(aggregator string) *TimeseriesDescriptorsListCall {
+	c.opt_["aggregator"] = aggregator
+	return c
+}
+
 // Count sets the optional parameter "count": Maximum number of time
 // series descriptors per page. Used for pagination. If not specified,
 // count = 100.
@@ -762,6 +824,21 @@
 	return c
 }
 
+// Window sets the optional parameter "window": The sampling window. At
+// most one data point will be returned for each window in the requested
+// time interval. This parameter is only valid for non-cumulative metric
+// types. Units:
+// - m: minute
+// - h: hour
+// - d: day
+// - w: week
+// Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is not
+// allowed; you should use 17d instead.
+func (c *TimeseriesDescriptorsListCall) Window(window string) *TimeseriesDescriptorsListCall {
+	c.opt_["window"] = window
+	return c
+}
+
 // Fields allows partial responses to be retrieved.
 // See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
 // for more information.
@@ -775,6 +852,9 @@
 	params := make(url.Values)
 	params.Set("alt", "json")
 	params.Set("youngest", fmt.Sprintf("%v", c.youngest))
+	if v, ok := c.opt_["aggregator"]; ok {
+		params.Set("aggregator", fmt.Sprintf("%v", v))
+	}
 	if v, ok := c.opt_["count"]; ok {
 		params.Set("count", fmt.Sprintf("%v", v))
 	}
@@ -790,6 +870,9 @@
 	if v, ok := c.opt_["timespan"]; ok {
 		params.Set("timespan", fmt.Sprintf("%v", v))
 	}
+	if v, ok := c.opt_["window"]; ok {
+		params.Set("window", fmt.Sprintf("%v", v))
+	}
 	if v, ok := c.opt_["fields"]; ok {
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
@@ -824,6 +907,23 @@
 	//     "youngest"
 	//   ],
 	//   "parameters": {
+	//     "aggregator": {
+	//       "description": "The aggregation function that will reduce the data points in each window to a single point. This parameter is only valid for non-cumulative metric types.",
+	//       "enum": [
+	//         "max",
+	//         "mean",
+	//         "min",
+	//         "sum"
+	//       ],
+	//       "enumDescriptions": [
+	//         "",
+	//         "",
+	//         "",
+	//         ""
+	//       ],
+	//       "location": "query",
+	//       "type": "string"
+	//     },
 	//     "count": {
 	//       "default": "100",
 	//       "description": "Maximum number of time series descriptors per page. Used for pagination. If not specified, count = 100.",
@@ -868,6 +968,12 @@
 	//       "pattern": "[0-9]+[smhdw]?",
 	//       "type": "string"
 	//     },
+	//     "window": {
+	//       "description": "The sampling window. At most one data point will be returned for each window in the requested time interval. This parameter is only valid for non-cumulative metric types. Units:  \n- m: minute \n- h: hour \n- d: day \n- w: week  Examples: 3m, 4w. Only one unit is allowed, for example: 2w3d is not allowed; you should use 17d instead.",
+	//       "location": "query",
+	//       "pattern": "[0-9]+[mhdw]?",
+	//       "type": "string"
+	//     },
 	//     "youngest": {
 	//       "description": "End of the time interval (inclusive), which is expressed as an RFC 3339 timestamp.",
 	//       "location": "query",
diff --git a/compute/v1/compute-gen.go b/compute/v1/compute-gen.go
index 6e89001..cdb9b3b 100644
--- a/compute/v1/compute-gen.go
+++ b/compute/v1/compute-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "compute:v1"
 const apiName = "compute"
diff --git a/container/v1beta1/container-api.json b/container/v1beta1/container-api.json
index 07a3e5f..b54aeaf 100644
--- a/container/v1beta1/container-api.json
+++ b/container/v1beta1/container-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/O8FpeLrV7oJ_TJYD5E7IAdYY0iY\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/SXN1CJBboRcuoJhIRmlw8WIpB8c\"",
  "discoveryVersion": "v1",
  "id": "container:v1beta1",
  "name": "container",
  "version": "v1beta1",
- "revision": "20150105",
+ "revision": "20150113",
  "title": "Google Container Engine API",
  "description": "The Google Container Engine API is used for building and managing container based applications, powered by the open source Kubernetes technology.",
  "ownerDomain": "google.com",
@@ -252,6 +252,13 @@
      "type": "string",
      "description": "The name of a Google Compute Engine machine type (e.g. n1-standard-1).\n\nIf unspecified, the default machine type is n1-standard-1."
     },
+    "serviceAccounts": {
+     "type": "array",
+     "description": "The optional list of ServiceAccounts, each with their specified scopes, to be made available on all of the node VMs. In addition to the service accounts and scopes specified, the \"default\" account will always be created with the following scopes to ensure the correct functioning of the cluster:  \n- https://www.googleapis.com/auth/compute,\n- https://www.googleapis.com/auth/devstorage.read_only",
+     "items": {
+      "$ref": "ServiceAccount"
+     }
+    },
     "sourceImage": {
      "type": "string",
      "description": "The fully-specified name of a Google Compute Engine image. For example: https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/backports-debian-7-wheezy-vYYYYMMDD (where YYYMMDD is the version date).\n\nIf specifying an image, you are responsible for ensuring its compatibility with the Debian 7 backports image. We recommend leaving this field blank to accept the default backports-debian-7-wheezy value."
@@ -315,6 +322,25 @@
      "description": "The name of the Google Compute Engine zone in which the operation is taking place."
     }
    }
+  },
+  "ServiceAccount": {
+   "id": "ServiceAccount",
+   "type": "object",
+   "description": "A Compute Engine service account.",
+   "externalTypeName": "container.v1beta1.ServiceAccount",
+   "properties": {
+    "email": {
+     "type": "string",
+     "description": "Email address of the service account."
+    },
+    "scopes": {
+     "type": "array",
+     "description": "The list of scopes to be made available for this service account.",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
   }
  },
  "resources": {
diff --git a/container/v1beta1/container-gen.go b/container/v1beta1/container-gen.go
index 01b30a3..5d39a0d 100644
--- a/container/v1beta1/container-gen.go
+++ b/container/v1beta1/container-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "container:v1beta1"
 const apiName = "container"
@@ -254,6 +256,17 @@
 	// n1-standard-1.
 	MachineType string `json:"machineType,omitempty"`
 
+	// ServiceAccounts: The optional list of ServiceAccounts, each with
+	// their specified scopes, to be made available on all of the node VMs.
+	// In addition to the service accounts and scopes specified, the
+	// "default" account will always be created with the following scopes to
+	// ensure the correct functioning of the cluster:
+	// - https://www.googleapis.com/auth/compute,
+	// - https://www.googleapis.com/auth/devstorage.read_only
+	ServiceAccounts []*ServiceAccount `json:"serviceAccounts,omitempty"`
+
 	// SourceImage: The fully-specified name of a Google Compute Engine
 	// image. For example:
 	// https://www.googleapis.com/compute/v1/projects/debian-cloud/global/ima
@@ -297,6 +310,15 @@
 	Zone string `json:"zone,omitempty"`
 }
 
+type ServiceAccount struct {
+	// Email: Email address of the service account.
+	Email string `json:"email,omitempty"`
+
+	// Scopes: The list of scopes to be made available for this service
+	// account.
+	Scopes []string `json:"scopes,omitempty"`
+}
+
 // method id "container.projects.clusters.list":
 
 type ProjectsClustersListCall struct {
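
For illustration, a minimal sketch of populating the new container ServiceAccount type generated above; the email address and scope value are placeholders, and only the Email and Scopes fields come from this hunk.

package main

import (
	"fmt"

	container "google.golang.org/api/container/v1beta1"
)

func main() {
	// Placeholder values; only the field names are taken from the generated type.
	sa := &container.ServiceAccount{
		Email:  "builder@my-project.iam.gserviceaccount.com",
		Scopes: []string{"https://www.googleapis.com/auth/devstorage.read_only"},
	}

	// Per the API description, a value like this is appended to the node
	// configuration's ServiceAccounts list; the "default" account is always
	// created with the compute and devstorage.read_only scopes regardless.
	fmt.Printf("%+v\n", sa)
}
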
diff --git a/content/v2/content-api.json b/content/v2/content-api.json
index d2f06f2..0e0681f 100644
--- a/content/v2/content-api.json
+++ b/content/v2/content-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/mxJxnaJAFqBnZvQqixN7zqS1MSM\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/UrgZohBY2VvsAHPm512CPTQfBLs\"",
  "discoveryVersion": "v1",
  "id": "content:v2",
  "name": "content",
  "canonicalName": "Shopping Content",
  "version": "v2",
- "revision": "20141203",
+ "revision": "20150115",
  "title": "Content API for Shopping",
  "description": "Manage product items, inventory, and Merchant Center accounts for Google Shopping.",
  "ownerDomain": "google.com",
@@ -154,6 +154,360 @@
     }
    }
   },
+  "AccountShipping": {
+   "id": "AccountShipping",
+   "type": "object",
+   "description": "The shipping settings of a merchant account.",
+   "properties": {
+    "accountId": {
+     "type": "string",
+     "description": "The ID of the account to which these account shipping settings belong.",
+     "format": "uint64",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "carrierRates": {
+     "type": "array",
+     "description": "Carrier-based shipping calculations.",
+     "items": {
+      "$ref": "AccountShippingCarrierRate"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accountShipping\".",
+     "default": "content#accountShipping"
+    },
+    "locationGroups": {
+     "type": "array",
+     "description": "Location groups for shipping.",
+     "items": {
+      "$ref": "AccountShippingLocationGroup"
+     }
+    },
+    "rateTables": {
+     "type": "array",
+     "description": "Rate tables definitions.",
+     "items": {
+      "$ref": "AccountShippingRateTable"
+     }
+    },
+    "services": {
+     "type": "array",
+     "description": "Shipping services describing shipping fees calculation.",
+     "items": {
+      "$ref": "AccountShippingShippingService"
+     }
+    }
+   }
+  },
+  "AccountShippingCarrierRate": {
+   "id": "AccountShippingCarrierRate",
+   "type": "object",
+   "description": "A carrier-calculated shipping rate.",
+   "properties": {
+    "carrier": {
+     "type": "string",
+     "description": "The carrier that is responsible for the shipping, such as \"UPS\", \"FedEx\", or \"USPS\".",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "carrierService": {
+     "type": "string",
+     "description": "The carrier service, such as \"Ground\" or \"2Day\".",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "modifierFlatRate": {
+     "$ref": "Price",
+     "description": "Additive shipping rate modifier."
+    },
+    "modifierPercent": {
+     "type": "string",
+     "description": "Multiplicative shipping rate modifier in percent. Represented as a floating point number without the percentage character."
+    },
+    "name": {
+     "type": "string",
+     "description": "The name of the carrier rate.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "saleCountry": {
+     "type": "string",
+     "description": "Sale country for which this carrier rate is valid, represented as an ISO_3166-1 Alpha-2 code.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "shippingOrigin": {
+     "type": "string",
+     "description": "Shipping origin represented as a postal code.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    }
+   }
+  },
+  "AccountShippingCondition": {
+   "id": "AccountShippingCondition",
+   "type": "object",
+   "properties": {
+    "deliveryLocationGroup": {
+     "type": "string",
+     "description": "Delivery location in terms of a location group name. A location group with this name must be specified among location groups."
+    },
+    "deliveryLocationId": {
+     "type": "string",
+     "description": "Delivery location in terms of a location ID. Can be used to represent administrative areas, smaller country subdivisions, or cities.",
+     "format": "int64"
+    },
+    "deliveryPostalCode": {
+     "type": "string",
+     "description": "Delivery location in terms of a postal code."
+    },
+    "deliveryPostalCodeRange": {
+     "$ref": "AccountShippingPostalCodeRange",
+     "description": "Delivery location in terms of a postal code range."
+    },
+    "priceMax": {
+     "$ref": "Price",
+     "description": "Maximum shipping price. Forms an interval between the maximum of smaller prices (exclusive) and this price (inclusive)."
+    },
+    "shippingLabel": {
+     "type": "string",
+     "description": "Shipping label of the product. The products with the label are matched."
+    },
+    "weightMax": {
+     "$ref": "Weight",
+     "description": "Maximum shipping weight. Forms an interval between the maximum of smaller weight (exclusive) and this weight (inclusive)."
+    }
+   }
+  },
+  "AccountShippingLocationGroup": {
+   "id": "AccountShippingLocationGroup",
+   "type": "object",
+   "description": "A user-defined location group in a given country. All the locations of the group must be of the same type.",
+   "properties": {
+    "country": {
+     "type": "string",
+     "description": "The country in which this location group is, represented as ISO_3166-1 Alpha-2 code.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "locationIds": {
+     "type": "array",
+     "description": "A location ID (also called criteria ID) representing administrative areas, smaller country subdivisions (counties), or cities.",
+     "items": {
+      "type": "string",
+      "format": "int64"
+     }
+    },
+    "name": {
+     "type": "string",
+     "description": "The name of the location group.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "postalCodeRanges": {
+     "type": "array",
+     "description": "A postal code range representing a city or a set of cities.",
+     "items": {
+      "$ref": "AccountShippingPostalCodeRange"
+     }
+    },
+    "postalCodes": {
+     "type": "array",
+     "description": "A postal code representing a city or a set of cities.  \n- A single postal code (e.g., 12345)\n- A postal code prefix followed by a star (e.g., 1234*)",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
+  },
+  "AccountShippingPostalCodeRange": {
+   "id": "AccountShippingPostalCodeRange",
+   "type": "object",
+   "description": "A postal code range that can be either:  \n- A range of postal codes (e.g., start=12340, end=12359)\n- A range of postal code prefixes (e.g., start=1234*, end=1235*). Prefixes must be of the same length (e.g., start=12*, end=2* is invalid).",
+   "properties": {
+    "end": {
+     "type": "string",
+     "description": "The last (inclusive) postal code or prefix of the range.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "start": {
+     "type": "string",
+     "description": "The first (inclusive) postal code or prefix of the range.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    }
+   }
+  },
+  "AccountShippingRateTable": {
+   "id": "AccountShippingRateTable",
+   "type": "object",
+   "description": "A single or bi-dimensional table of shipping rates. Each dimension is defined in terms of consecutive price/weight ranges, delivery locations, or shipping labels.",
+   "properties": {
+    "content": {
+     "type": "array",
+     "description": "One-dimensional table cells define one condition along the same dimension. Bi-dimensional table cells use two dimensions with respectively M and N distinct values and must contain exactly M * N cells with distinct conditions (one for each possible value pair).",
+     "items": {
+      "$ref": "AccountShippingRateTableCell"
+     }
+    },
+    "name": {
+     "type": "string",
+     "description": "The name of the rate table.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "saleCountry": {
+     "type": "string",
+     "description": "Sale country for which this table is valid, represented as an ISO_3166-1 Alpha-2 code.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    }
+   }
+  },
+  "AccountShippingRateTableCell": {
+   "id": "AccountShippingRateTableCell",
+   "type": "object",
+   "properties": {
+    "condition": {
+     "$ref": "AccountShippingCondition",
+     "description": "Conditions for which the cell is valid. All cells in a table must use the same dimension or pair of dimensions among price, weight, shipping_label or delivery location. If no condition is specified, the cell acts as a catch-all and matches all the elements that are not matched by other cells in this dimension."
+    },
+    "rate": {
+     "$ref": "Price",
+     "description": "The rate applicable if the cell conditions are matched.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    }
+   }
+  },
+  "AccountShippingShippingService": {
+   "id": "AccountShippingShippingService",
+   "type": "object",
+   "description": "Shipping services provided in a country.",
+   "properties": {
+    "active": {
+     "type": "boolean",
+     "description": "Whether the shipping service is available."
+    },
+    "calculationMethod": {
+     "$ref": "AccountShippingShippingServiceCalculationMethod",
+     "description": "Calculation method for the \"simple\" case that needs no rules."
+    },
+    "costRuleTree": {
+     "$ref": "AccountShippingShippingServiceCostRule",
+     "description": "Decision tree for \"complicated\" shipping cost calculation."
+    },
+    "name": {
+     "type": "string",
+     "description": "The name of this shipping service.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "saleCountry": {
+     "type": "string",
+     "description": "Sale country for which this service can be used, represented as an ISO_3166-1 Alpha-2 code.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    }
+   }
+  },
+  "AccountShippingShippingServiceCalculationMethod": {
+   "id": "AccountShippingShippingServiceCalculationMethod",
+   "type": "object",
+   "description": "Shipping cost calculation method. Exactly one of the fields is set.",
+   "properties": {
+    "carrierRate": {
+     "type": "string",
+     "description": "Name of the carrier rate to use for the calculation."
+    },
+    "excluded": {
+     "type": "boolean",
+     "description": "Delivery is excluded. Valid only within cost rules tree."
+    },
+    "flatRate": {
+     "$ref": "Price",
+     "description": "Fixed price shipping, represented as a floating point number associated with a currency."
+    },
+    "percentageRate": {
+     "type": "string",
+     "description": "Percentage of the price, represented as a floating point number without the percentage character."
+    },
+    "rateTable": {
+     "type": "string",
+     "description": "Name of the rate table to use for the calculation."
+    }
+   }
+  },
+  "AccountShippingShippingServiceCostRule": {
+   "id": "AccountShippingShippingServiceCostRule",
+   "type": "object",
+   "description": "Building block of the cost calculation decision tree.  \n- The tree root should have no condition and no calculation method.\n- All the children must have a condition on the same dimension. The first child matching a condition is entered, therefore, price and weight conditions form contiguous intervals.\n- The last child of an element must have no condition and matches all elements not previously matched.\n- Children and calculation method are mutually exclusive, and exactly one of them must be defined; the root must only have children.",
+   "properties": {
+    "calculationMethod": {
+     "$ref": "AccountShippingShippingServiceCalculationMethod",
+     "description": "Final calculation method to be used only in leaf nodes."
+    },
+    "children": {
+     "type": "array",
+     "description": "Subsequent rules to be applied, only for inner nodes. The last child must not specify a condition and acts as a catch-all.",
+     "items": {
+      "$ref": "AccountShippingShippingServiceCostRule"
+     }
+    },
+    "condition": {
+     "$ref": "AccountShippingCondition",
+     "description": "Condition for this rule to be applicable. If no condition is specified, the rule acts as a catch-all."
+    }
+   }
+  },
   "AccountStatus": {
    "id": "AccountStatus",
    "type": "object",
@@ -246,6 +600,68 @@
     }
    }
   },
+  "AccountTax": {
+   "id": "AccountTax",
+   "type": "object",
+   "description": "The tax settings of a merchant account.",
+   "properties": {
+    "accountId": {
+     "type": "string",
+     "description": "The ID of the account to which these account tax settings belong.",
+     "format": "uint64",
+     "annotations": {
+      "required": [
+       "content.accounttax.update"
+      ]
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accountTax\".",
+     "default": "content#accountTax"
+    },
+    "rules": {
+     "type": "array",
+     "description": "Tax rules. Updating the tax rules will enable US taxes (not reversible). Defining no rules is equivalent to not charging tax at all.",
+     "items": {
+      "$ref": "AccountTaxTaxRule"
+     }
+    }
+   }
+  },
+  "AccountTaxTaxRule": {
+   "id": "AccountTaxTaxRule",
+   "type": "object",
+   "description": "Tax calculation rule to apply in a state or province (USA only).",
+   "properties": {
+    "country": {
+     "type": "string",
+     "description": "Country code in which tax is applicable."
+    },
+    "locationId": {
+     "type": "string",
+     "description": "State (or province) in which the tax is applicable, described by its location id (also called criteria id).",
+     "format": "uint64",
+     "annotations": {
+      "required": [
+       "content.accounttax.update"
+      ]
+     }
+    },
+    "ratePercent": {
+     "type": "string",
+     "description": "Explicit tax rate in percent, represented as a floating point number without the percentage character. Must not be negative."
+    },
+    "shippingTaxed": {
+     "type": "boolean",
+     "description": "If true, shipping charges are also taxed."
+    },
+    "useGlobalRate": {
+     "type": "boolean",
+     "description": "Whether the tax rate is taken from a global tax table or specified explicitly."
+    }
+   }
+  },
   "AccountUser": {
    "id": "AccountUser",
    "type": "object",
@@ -276,7 +692,7 @@
   "AccountsCustomBatchRequestEntry": {
    "id": "AccountsCustomBatchRequestEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch request to the accounts service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch accounts request.",
    "properties": {
     "account": {
      "$ref": "Account",
@@ -323,7 +739,7 @@
   "AccountsCustomBatchResponseEntry": {
    "id": "AccountsCustomBatchResponseEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch response from the accounts service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch accounts response.",
    "properties": {
     "account": {
      "$ref": "Account",
@@ -366,6 +782,112 @@
     }
    }
   },
+  "AccountshippingCustomBatchRequest": {
+   "id": "AccountshippingCustomBatchRequest",
+   "type": "object",
+   "properties": {
+    "entries": {
+     "type": "array",
+     "description": "The request entries to be processed in the batch.",
+     "items": {
+      "$ref": "AccountshippingCustomBatchRequestEntry"
+     }
+    }
+   }
+  },
+  "AccountshippingCustomBatchRequestEntry": {
+   "id": "AccountshippingCustomBatchRequestEntry",
+   "type": "object",
+   "description": "A batch entry encoding a single non-batch accountshipping request.",
+   "properties": {
+    "accountId": {
+     "type": "string",
+     "description": "The ID of the account for which to get/update account shipping settings.",
+     "format": "uint64"
+    },
+    "accountShipping": {
+     "$ref": "AccountShipping",
+     "description": "The account shipping settings to update. Only defined if the method is update."
+    },
+    "batchId": {
+     "type": "integer",
+     "description": "An entry ID, unique within the batch request.",
+     "format": "uint32"
+    },
+    "merchantId": {
+     "type": "string",
+     "description": "The ID of the managing account.",
+     "format": "uint64"
+    },
+    "method": {
+     "type": "string"
+    }
+   }
+  },
+  "AccountshippingCustomBatchResponse": {
+   "id": "AccountshippingCustomBatchResponse",
+   "type": "object",
+   "properties": {
+    "entries": {
+     "type": "array",
+     "description": "The result of the execution of the batch requests.",
+     "items": {
+      "$ref": "AccountshippingCustomBatchResponseEntry"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accountshippingCustomBatchResponse\".",
+     "default": "content#accountshippingCustomBatchResponse"
+    }
+   }
+  },
+  "AccountshippingCustomBatchResponseEntry": {
+   "id": "AccountshippingCustomBatchResponseEntry",
+   "type": "object",
+   "description": "A batch entry encoding a single non-batch accountshipping response.",
+   "properties": {
+    "accountShipping": {
+     "$ref": "AccountShipping",
+     "description": "The retrieved or updated account shipping settings."
+    },
+    "batchId": {
+     "type": "integer",
+     "description": "The ID of the request entry this entry responds to.",
+     "format": "uint32"
+    },
+    "errors": {
+     "$ref": "Errors",
+     "description": "A list of errors defined if and only if the request failed."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accountshippingCustomBatchResponseEntry\".",
+     "default": "content#accountshippingCustomBatchResponseEntry"
+    }
+   }
+  },
+  "AccountshippingListResponse": {
+   "id": "AccountshippingListResponse",
+   "type": "object",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accountshippingListResponse\".",
+     "default": "content#accountshippingListResponse"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The token for the retrieval of the next page of account shipping settings."
+    },
+    "resources": {
+     "type": "array",
+     "items": {
+      "$ref": "AccountShipping"
+     }
+    }
+   }
+  },
   "AccountstatusesCustomBatchRequest": {
    "id": "AccountstatusesCustomBatchRequest",
    "type": "object",
@@ -382,7 +904,7 @@
   "AccountstatusesCustomBatchRequestEntry": {
    "id": "AccountstatusesCustomBatchRequestEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch request to the accountstatuses service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch accountstatuses request.",
    "properties": {
     "accountId": {
      "type": "string",
@@ -426,7 +948,7 @@
   "AccountstatusesCustomBatchResponseEntry": {
    "id": "AccountstatusesCustomBatchResponseEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch response from the accountstatuses service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch accountstatuses response.",
    "properties": {
     "accountStatus": {
      "$ref": "AccountStatus",
@@ -464,6 +986,112 @@
     }
    }
   },
+  "AccounttaxCustomBatchRequest": {
+   "id": "AccounttaxCustomBatchRequest",
+   "type": "object",
+   "properties": {
+    "entries": {
+     "type": "array",
+     "description": "The request entries to be processed in the batch.",
+     "items": {
+      "$ref": "AccounttaxCustomBatchRequestEntry"
+     }
+    }
+   }
+  },
+  "AccounttaxCustomBatchRequestEntry": {
+   "id": "AccounttaxCustomBatchRequestEntry",
+   "type": "object",
+   "description": "A batch entry encoding a single non-batch accounttax request.",
+   "properties": {
+    "accountId": {
+     "type": "string",
+     "description": "The ID of the account for which to get/update account tax settings.",
+     "format": "uint64"
+    },
+    "accountTax": {
+     "$ref": "AccountTax",
+     "description": "The account tax settings to update. Only defined if the method is update."
+    },
+    "batchId": {
+     "type": "integer",
+     "description": "An entry ID, unique within the batch request.",
+     "format": "uint32"
+    },
+    "merchantId": {
+     "type": "string",
+     "description": "The ID of the managing account.",
+     "format": "uint64"
+    },
+    "method": {
+     "type": "string"
+    }
+   }
+  },
+  "AccounttaxCustomBatchResponse": {
+   "id": "AccounttaxCustomBatchResponse",
+   "type": "object",
+   "properties": {
+    "entries": {
+     "type": "array",
+     "description": "The result of the execution of the batch requests.",
+     "items": {
+      "$ref": "AccounttaxCustomBatchResponseEntry"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accounttaxCustomBatchResponse\".",
+     "default": "content#accounttaxCustomBatchResponse"
+    }
+   }
+  },
+  "AccounttaxCustomBatchResponseEntry": {
+   "id": "AccounttaxCustomBatchResponseEntry",
+   "type": "object",
+   "description": "A batch entry encoding a single non-batch accounttax response.",
+   "properties": {
+    "accountTax": {
+     "$ref": "AccountTax",
+     "description": "The retrieved or updated account tax settings."
+    },
+    "batchId": {
+     "type": "integer",
+     "description": "The ID of the request entry this entry responds to.",
+     "format": "uint32"
+    },
+    "errors": {
+     "$ref": "Errors",
+     "description": "A list of errors defined if and only if the request failed."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accounttaxCustomBatchResponseEntry\".",
+     "default": "content#accounttaxCustomBatchResponseEntry"
+    }
+   }
+  },
+  "AccounttaxListResponse": {
+   "id": "AccounttaxListResponse",
+   "type": "object",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"content#accounttaxListResponse\".",
+     "default": "content#accounttaxListResponse"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The token for the retrieval of the next page of account tax settings."
+    },
+    "resources": {
+     "type": "array",
+     "items": {
+      "$ref": "AccountTax"
+     }
+    }
+   }
+  },
   "Datafeed": {
    "id": "Datafeed",
    "type": "object",
@@ -716,7 +1344,7 @@
   "DatafeedsCustomBatchRequestEntry": {
    "id": "DatafeedsCustomBatchRequestEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch request to the datafeeds service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch datafeeds request.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -763,7 +1391,7 @@
   "DatafeedsCustomBatchResponseEntry": {
    "id": "DatafeedsCustomBatchResponseEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch response from the datafeeds service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch datafeeds response.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -817,7 +1445,7 @@
   "DatafeedstatusesCustomBatchRequestEntry": {
    "id": "DatafeedstatusesCustomBatchRequestEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch request to the datafeedstatuses service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch datafeedstatuses request.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -860,7 +1488,7 @@
   "DatafeedstatusesCustomBatchResponseEntry": {
    "id": "DatafeedstatusesCustomBatchResponseEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch response from the datafeedstatuses service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch datafeedstatuses response.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -988,7 +1616,7 @@
   "InventoryCustomBatchRequestEntry": {
    "id": "InventoryCustomBatchRequestEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch request to the inventory service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch inventory request.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -1035,7 +1663,7 @@
   "InventoryCustomBatchResponseEntry": {
    "id": "InventoryCustomBatchResponseEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch response from the inventory service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch inventory response.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -1200,7 +1828,7 @@
     },
     "customAttributes": {
      "type": "array",
-     "description": "A list of custom (merchant-provided) attributes.",
+     "description": "A list of custom (merchant-provided) attributes. It can also be used for submitting any attribute of the feed specification in its generic form (e.g., { \"name\": \"size type\", \"type\": \"text\", \"value\": \"regular\" }). This is useful for submitting attributes not explicitly exposed by the API.",
      "items": {
       "$ref": "ProductCustomAttribute"
      }
@@ -1249,7 +1877,7 @@
     },
     "expirationDate": {
      "type": "string",
-     "description": "Date that an item will expire."
+     "description": "Date on which the item should expire, as specified upon insertion. The actual expiration date in Google Shopping is exposed in productstatuses as googleExpirationDate and might be earlier if expirationDate is too far in the future."
     },
     "gender": {
      "type": "string",
@@ -1430,7 +2058,7 @@
    "properties": {
     "name": {
      "type": "string",
-     "description": "The name of the attribute."
+     "description": "The name of the attribute. Underscores will be replaced by spaces upon insertion."
     },
     "type": {
      "type": "string",
@@ -1459,7 +2087,7 @@
     },
     "name": {
      "type": "string",
-     "description": "The name of the group."
+     "description": "The name of the group. Underscores will be replaced by spaces upon insertion."
     }
    }
   },
@@ -1547,6 +2175,10 @@
    "type": "object",
    "description": "The status of a product, i.e., information about a product computed asynchronously by the data quality analysis.",
    "properties": {
+    "creationDate": {
+     "type": "string",
+     "description": "Date on which the item has been created."
+    },
     "dataQualityIssues": {
      "type": "array",
      "description": "A list of data quality issues associated with the product.",
@@ -1561,11 +2193,19 @@
       "$ref": "ProductStatusDestinationStatus"
      }
     },
+    "googleExpirationDate": {
+     "type": "string",
+     "description": "Date on which the item expires in Google Shopping."
+    },
     "kind": {
      "type": "string",
      "description": "Identifies what kind of resource this is. Value: the fixed string \"content#productStatus\".",
      "default": "content#productStatus"
     },
+    "lastUpdateDate": {
+     "type": "string",
+     "description": "Date on which the item has been last updated."
+    },
     "link": {
      "type": "string",
      "description": "The link to the product."
@@ -1710,7 +2350,7 @@
   "ProductsCustomBatchRequestEntry": {
    "id": "ProductsCustomBatchRequestEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch request to the products service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch products request.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -1756,7 +2396,7 @@
   "ProductsCustomBatchResponseEntry": {
    "id": "ProductsCustomBatchResponseEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch response from the products service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch products response.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -1815,7 +2455,7 @@
   "ProductstatusesCustomBatchRequestEntry": {
    "id": "ProductstatusesCustomBatchRequestEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch request to the productstatuses service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch productstatuses request.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -1857,7 +2497,7 @@
   "ProductstatusesCustomBatchResponseEntry": {
    "id": "ProductstatusesCustomBatchResponseEntry",
    "type": "object",
-   "description": "A batch entry encoding a single non-batch response from the productstatuses service of the Content API for Shopping.",
+   "description": "A batch entry encoding a single non-batch productstatuses response.",
    "properties": {
     "batchId": {
      "type": "integer",
@@ -1899,6 +2539,30 @@
      }
     }
    }
+  },
+  "Weight": {
+   "id": "Weight",
+   "type": "object",
+   "properties": {
+    "unit": {
+     "type": "string",
+     "description": "The weight unit.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    },
+    "value": {
+     "type": "string",
+     "description": "The weight represented as a number.",
+     "annotations": {
+      "required": [
+       "content.accountshipping.update"
+      ]
+     }
+    }
+   }
   }
  },
  "resources": {
@@ -2114,6 +2778,162 @@
     }
    }
   },
+  "accountshipping": {
+   "methods": {
+    "custombatch": {
+     "id": "content.accountshipping.custombatch",
+     "path": "accountshipping/batch",
+     "httpMethod": "POST",
+     "description": "Retrieves and updates the shipping settings of multiple accounts in a single request.",
+     "request": {
+      "$ref": "AccountshippingCustomBatchRequest"
+     },
+     "response": {
+      "$ref": "AccountshippingCustomBatchResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "get": {
+     "id": "content.accountshipping.get",
+     "path": "{merchantId}/accountshipping/{accountId}",
+     "httpMethod": "GET",
+     "description": "Retrieves the shipping settings of the account.",
+     "parameters": {
+      "accountId": {
+       "type": "string",
+       "description": "The ID of the account for which to get/update account shipping settings.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "merchantId",
+      "accountId"
+     ],
+     "response": {
+      "$ref": "AccountShipping"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "list": {
+     "id": "content.accountshipping.list",
+     "path": "{merchantId}/accountshipping",
+     "httpMethod": "GET",
+     "description": "Lists the shipping settings of the sub-accounts in your Merchant Center account.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "The maximum number of shipping settings to return in the response, used for paging.",
+       "format": "uint32",
+       "location": "query"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "The token returned by the previous request.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "merchantId"
+     ],
+     "response": {
+      "$ref": "AccountshippingListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "patch": {
+     "id": "content.accountshipping.patch",
+     "path": "{merchantId}/accountshipping/{accountId}",
+     "httpMethod": "PATCH",
+     "description": "Updates the shipping settings of the account. This method supports patch semantics.",
+     "parameters": {
+      "accountId": {
+       "type": "string",
+       "description": "The ID of the account for which to get/update account shipping settings.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "merchantId",
+      "accountId"
+     ],
+     "request": {
+      "$ref": "AccountShipping"
+     },
+     "response": {
+      "$ref": "AccountShipping"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "update": {
+     "id": "content.accountshipping.update",
+     "path": "{merchantId}/accountshipping/{accountId}",
+     "httpMethod": "PUT",
+     "description": "Updates the shipping settings of the account.",
+     "parameters": {
+      "accountId": {
+       "type": "string",
+       "description": "The ID of the account for which to get/update account shipping settings.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "merchantId",
+      "accountId"
+     ],
+     "request": {
+      "$ref": "AccountShipping"
+     },
+     "response": {
+      "$ref": "AccountShipping"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    }
+   }
+  },
   "accountstatuses": {
    "methods": {
     "custombatch": {
@@ -2199,6 +3019,162 @@
     }
    }
   },
+  "accounttax": {
+   "methods": {
+    "custombatch": {
+     "id": "content.accounttax.custombatch",
+     "path": "accounttax/batch",
+     "httpMethod": "POST",
+     "description": "Retrieves and updates tax settings of multiple accounts in a single request.",
+     "request": {
+      "$ref": "AccounttaxCustomBatchRequest"
+     },
+     "response": {
+      "$ref": "AccounttaxCustomBatchResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "get": {
+     "id": "content.accounttax.get",
+     "path": "{merchantId}/accounttax/{accountId}",
+     "httpMethod": "GET",
+     "description": "Retrieves the tax settings of the account.",
+     "parameters": {
+      "accountId": {
+       "type": "string",
+       "description": "The ID of the account for which to get/update account tax settings.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "merchantId",
+      "accountId"
+     ],
+     "response": {
+      "$ref": "AccountTax"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "list": {
+     "id": "content.accounttax.list",
+     "path": "{merchantId}/accounttax",
+     "httpMethod": "GET",
+     "description": "Lists the tax settings of the sub-accounts in your Merchant Center account.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "The maximum number of tax settings to return in the response, used for paging.",
+       "format": "uint32",
+       "location": "query"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "The token returned by the previous request.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "merchantId"
+     ],
+     "response": {
+      "$ref": "AccounttaxListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "patch": {
+     "id": "content.accounttax.patch",
+     "path": "{merchantId}/accounttax/{accountId}",
+     "httpMethod": "PATCH",
+     "description": "Updates the tax settings of the account. This method supports patch semantics.",
+     "parameters": {
+      "accountId": {
+       "type": "string",
+       "description": "The ID of the account for which to get/update account tax settings.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "merchantId",
+      "accountId"
+     ],
+     "request": {
+      "$ref": "AccountTax"
+     },
+     "response": {
+      "$ref": "AccountTax"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    },
+    "update": {
+     "id": "content.accounttax.update",
+     "path": "{merchantId}/accounttax/{accountId}",
+     "httpMethod": "PUT",
+     "description": "Updates the tax settings of the account.",
+     "parameters": {
+      "accountId": {
+       "type": "string",
+       "description": "The ID of the account for which to get/update account tax settings.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      },
+      "merchantId": {
+       "type": "string",
+       "description": "The ID of the managing account.",
+       "required": true,
+       "format": "uint64",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "merchantId",
+      "accountId"
+     ],
+     "request": {
+      "$ref": "AccountTax"
+     },
+     "response": {
+      "$ref": "AccountTax"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/content"
+     ]
+    }
+   }
+  },
   "datafeeds": {
    "methods": {
     "custombatch": {
diff --git a/content/v2/content-gen.go b/content/v2/content-gen.go
index f07bb1f..7fccf3b 100644
--- a/content/v2/content-gen.go
+++ b/content/v2/content-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "content:v2"
 const apiName = "content"
@@ -51,7 +53,9 @@
 	}
 	s := &Service{client: client, BasePath: basePath}
 	s.Accounts = NewAccountsService(s)
+	s.Accountshipping = NewAccountshippingService(s)
 	s.Accountstatuses = NewAccountstatusesService(s)
+	s.Accounttax = NewAccounttaxService(s)
 	s.Datafeeds = NewDatafeedsService(s)
 	s.Datafeedstatuses = NewDatafeedstatusesService(s)
 	s.Inventory = NewInventoryService(s)
@@ -66,8 +70,12 @@
 
 	Accounts *AccountsService
 
+	Accountshipping *AccountshippingService
+
 	Accountstatuses *AccountstatusesService
 
+	Accounttax *AccounttaxService
+
 	Datafeeds *DatafeedsService
 
 	Datafeedstatuses *DatafeedstatusesService
@@ -88,6 +96,15 @@
 	s *Service
 }
 
+func NewAccountshippingService(s *Service) *AccountshippingService {
+	rs := &AccountshippingService{s: s}
+	return rs
+}
+
+type AccountshippingService struct {
+	s *Service
+}
+
 func NewAccountstatusesService(s *Service) *AccountstatusesService {
 	rs := &AccountstatusesService{s: s}
 	return rs
@@ -97,6 +114,15 @@
 	s *Service
 }
 
+func NewAccounttaxService(s *Service) *AccounttaxService {
+	rs := &AccounttaxService{s: s}
+	return rs
+}
+
+type AccounttaxService struct {
+	s *Service
+}
+
 func NewDatafeedsService(s *Service) *DatafeedsService {
 	rs := &DatafeedsService{s: s}
 	return rs
@@ -184,6 +210,199 @@
 	Status string `json:"status,omitempty"`
 }
 
+type AccountShipping struct {
+	// AccountId: The ID of the account to which these account shipping
+	// settings belong.
+	AccountId uint64 `json:"accountId,omitempty,string"`
+
+	// CarrierRates: Carrier-based shipping calculations.
+	CarrierRates []*AccountShippingCarrierRate `json:"carrierRates,omitempty"`
+
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accountShipping".
+	Kind string `json:"kind,omitempty"`
+
+	// LocationGroups: Location groups for shipping.
+	LocationGroups []*AccountShippingLocationGroup `json:"locationGroups,omitempty"`
+
+	// RateTables: Rate table definitions.
+	RateTables []*AccountShippingRateTable `json:"rateTables,omitempty"`
+
+	// Services: Shipping services describing how shipping fees are calculated.
+	Services []*AccountShippingShippingService `json:"services,omitempty"`
+}
+
+type AccountShippingCarrierRate struct {
+	// Carrier: The carrier that is responsible for the shipping, such as
+	// "UPS", "FedEx", or "USPS".
+	Carrier string `json:"carrier,omitempty"`
+
+	// CarrierService: The carrier service, such as "Ground" or "2Day".
+	CarrierService string `json:"carrierService,omitempty"`
+
+	// ModifierFlatRate: Additive shipping rate modifier.
+	ModifierFlatRate *Price `json:"modifierFlatRate,omitempty"`
+
+	// ModifierPercent: Multiplicative shipping rate modifier in percent.
+	// Represented as a floating point number without the percentage
+	// character.
+	ModifierPercent string `json:"modifierPercent,omitempty"`
+
+	// Name: The name of the carrier rate.
+	Name string `json:"name,omitempty"`
+
+	// SaleCountry: Sale country for which this carrier rate is valid,
+	// represented as an ISO_3166-1 Alpha-2 code.
+	SaleCountry string `json:"saleCountry,omitempty"`
+
+	// ShippingOrigin: Shipping origin represented as a postal code.
+	ShippingOrigin string `json:"shippingOrigin,omitempty"`
+}
+
+type AccountShippingCondition struct {
+	// DeliveryLocationGroup: Delivery location in terms of a location group
+	// name. A location group with this name must be specified among
+	// location groups.
+	DeliveryLocationGroup string `json:"deliveryLocationGroup,omitempty"`
+
+	// DeliveryLocationId: Delivery location in terms of a location ID. Can
+	// be used to represent administrative areas, smaller country
+	// subdivisions, or cities.
+	DeliveryLocationId int64 `json:"deliveryLocationId,omitempty,string"`
+
+	// DeliveryPostalCode: Delivery location in terms of a postal code.
+	DeliveryPostalCode string `json:"deliveryPostalCode,omitempty"`
+
+	// DeliveryPostalCodeRange: Delivery location in terms of a postal code
+	// range.
+	DeliveryPostalCodeRange *AccountShippingPostalCodeRange `json:"deliveryPostalCodeRange,omitempty"`
+
+	// PriceMax: Maximum shipping price. Forms an interval between the
+	// maximum of smaller prices (exclusive) and this price (inclusive).
+	PriceMax *Price `json:"priceMax,omitempty"`
+
+	// ShippingLabel: Shipping label of the product. The products with the
+	// label are matched.
+	ShippingLabel string `json:"shippingLabel,omitempty"`
+
+	// WeightMax: Maximum shipping weight. Forms an interval between the
+	// maximum of smaller weights (exclusive) and this weight (inclusive).
+	WeightMax *Weight `json:"weightMax,omitempty"`
+}
+
+type AccountShippingLocationGroup struct {
+	// Country: The country in which this location group is, represented as
+	// ISO_3166-1 Alpha-2 code.
+	Country string `json:"country,omitempty"`
+
+	// LocationIds: A location ID (also called criteria ID) representing
+	// administrative areas, smaller country subdivisions (counties), or
+	// cities.
+	LocationIds googleapi.Int64s `json:"locationIds,omitempty"`
+
+	// Name: The name of the location group.
+	Name string `json:"name,omitempty"`
+
+	// PostalCodeRanges: A postal code range representing a city or a set of
+	// cities.
+	PostalCodeRanges []*AccountShippingPostalCodeRange `json:"postalCodeRanges,omitempty"`
+
+	// PostalCodes: A postal code representing a city or a set of cities.
+	//
+	// - A single postal code (e.g., 12345)
+	// - A postal code prefix followed
+	// by a star (e.g., 1234*)
+	PostalCodes []string `json:"postalCodes,omitempty"`
+}
+
+type AccountShippingPostalCodeRange struct {
+	// End: The last (inclusive) postal code or prefix of the range.
+	End string `json:"end,omitempty"`
+
+	// Start: The first (inclusive) postal code or prefix of the range.
+	Start string `json:"start,omitempty"`
+}
+
+type AccountShippingRateTable struct {
+	// Content: One-dimensional table cells define one condition along the
+	// same dimension. Two-dimensional table cells use two dimensions with
+	// M and N distinct values respectively, and must contain exactly M * N
+	// cells with distinct conditions (one for each possible value pair).
+	Content []*AccountShippingRateTableCell `json:"content,omitempty"`
+
+	// Name: The name of the rate table.
+	Name string `json:"name,omitempty"`
+
+	// SaleCountry: Sale country for which this table is valid, represented
+	// as an ISO_3166-1 Alpha-2 code.
+	SaleCountry string `json:"saleCountry,omitempty"`
+}
+
+type AccountShippingRateTableCell struct {
+	// Condition: Conditions for which the cell is valid. All cells in a
+	// table must use the same dimension or pair of dimensions among price,
+	// weight, shipping_label or delivery location. If no condition is
+	// specified, the cell acts as a catch-all and matches all the elements
+	// that are not matched by other cells in this dimension.
+	Condition *AccountShippingCondition `json:"condition,omitempty"`
+
+	// Rate: The rate applicable if the cell conditions are matched.
+	Rate *Price `json:"rate,omitempty"`
+}
+
+type AccountShippingShippingService struct {
+	// Active: Whether the shipping service is available.
+	Active bool `json:"active,omitempty"`
+
+	// CalculationMethod: Calculation method for the "simple" case that
+	// needs no rules.
+	CalculationMethod *AccountShippingShippingServiceCalculationMethod `json:"calculationMethod,omitempty"`
+
+	// CostRuleTree: Decision tree for "complicated" shipping cost
+	// calculation.
+	CostRuleTree *AccountShippingShippingServiceCostRule `json:"costRuleTree,omitempty"`
+
+	// Name: The name of this shipping service.
+	Name string `json:"name,omitempty"`
+
+	// SaleCountry: Sale country for which this service can be used,
+	// represented as an ISO_3166-1 Alpha-2 code.
+	SaleCountry string `json:"saleCountry,omitempty"`
+}
+
+type AccountShippingShippingServiceCalculationMethod struct {
+	// CarrierRate: Name of the carrier rate to use for the calculation.
+	CarrierRate string `json:"carrierRate,omitempty"`
+
+	// Excluded: Delivery is excluded. Valid only within cost rules tree.
+	Excluded bool `json:"excluded,omitempty"`
+
+	// FlatRate: Fixed price shipping, represented as a floating point
+	// number associated with a currency.
+	FlatRate *Price `json:"flatRate,omitempty"`
+
+	// PercentageRate: Percentage of the price, represented as a floating
+	// point number without the percentage character.
+	PercentageRate string `json:"percentageRate,omitempty"`
+
+	// RateTable: Name of the rate table to use for the calculation.
+	RateTable string `json:"rateTable,omitempty"`
+}
+
+type AccountShippingShippingServiceCostRule struct {
+	// CalculationMethod: Final calculation method to be used only in leaf
+	// nodes.
+	CalculationMethod *AccountShippingShippingServiceCalculationMethod `json:"calculationMethod,omitempty"`
+
+	// Children: Subsequent rules to be applied, only for inner nodes. The
+	// last child must not specify a condition and acts as a catch-all.
+	Children []*AccountShippingShippingServiceCostRule `json:"children,omitempty"`
+
+	// Condition: Condition for this rule to be applicable. If no condition
+	// is specified, the rule acts as a catch-all.
+	Condition *AccountShippingCondition `json:"condition,omitempty"`
+}
+
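For orientation, here is a minimal sketch of how the new shipping structures compose. The import path follows the repository layout (content/v2); the account ID, carrier names, and postal code are hypothetical:

	package main

	import (
		"encoding/json"
		"fmt"

		content "google.golang.org/api/content/v2"
	)

	func main() {
		// Hypothetical carrier-based shipping configuration for one sub-account.
		shipping := &content.AccountShipping{
			AccountId: 123456,
			CarrierRates: []*content.AccountShippingCarrierRate{
				{
					Name:           "ups-ground-us", // hypothetical rate name
					Carrier:        "UPS",
					CarrierService: "Ground",
					SaleCountry:    "US",
					ShippingOrigin: "94043",
				},
			},
		}
		b, _ := json.MarshalIndent(shipping, "", "  ")
		fmt.Println(string(b)) // roughly the JSON body an update would send
	}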
 type AccountStatus struct {
 	// AccountId: The ID of the account for which the status is reported.
 	AccountId string `json:"accountId,omitempty"`
@@ -240,6 +459,41 @@
 	ValueOnLandingPage string `json:"valueOnLandingPage,omitempty"`
 }
 
+type AccountTax struct {
+	// AccountId: The ID of the account to which these account tax settings
+	// belong.
+	AccountId uint64 `json:"accountId,omitempty,string"`
+
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accountTax".
+	Kind string `json:"kind,omitempty"`
+
+	// Rules: Tax rules. Updating the tax rules will enable US taxes (not
+	// reversible). Defining no rules is equivalent to not charging tax at
+	// all.
+	Rules []*AccountTaxTaxRule `json:"rules,omitempty"`
+}
+
+type AccountTaxTaxRule struct {
+	// Country: Country code in which tax is applicable.
+	Country string `json:"country,omitempty"`
+
+	// LocationId: State (or province) in which the tax is applicable,
+	// described by its location ID (also called criteria ID).
+	LocationId uint64 `json:"locationId,omitempty,string"`
+
+	// RatePercent: Explicit tax rate in percent, represented as a floating
+	// point number without the percentage character. Must not be negative.
+	RatePercent string `json:"ratePercent,omitempty"`
+
+	// ShippingTaxed: If true, shipping charges are also taxed.
+	ShippingTaxed bool `json:"shippingTaxed,omitempty"`
+
+	// UseGlobalRate: Whether the tax rate is taken from a global tax table
+	// or specified explicitly.
+	UseGlobalRate bool `json:"useGlobalRate,omitempty"`
+}
+
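Similarly, a sketch of an AccountTax value with a single explicit rule (a fragment that reuses the content import from the previous sketch; the IDs and rate are hypothetical):

	// Enable an explicit 8.25% rate for one US region and also tax shipping.
	tax := &content.AccountTax{
		AccountId: 123456,
		Rules: []*content.AccountTaxTaxRule{
			{
				Country:       "US",
				LocationId:    21137, // hypothetical location (criteria) ID
				RatePercent:   "8.25",
				ShippingTaxed: true,
			},
		},
	}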
 type AccountUser struct {
 	// Admin: Whether user is an admin.
 	Admin bool `json:"admin,omitempty"`
@@ -308,6 +562,65 @@
 	Resources []*Account `json:"resources,omitempty"`
 }
 
+type AccountshippingCustomBatchRequest struct {
+	// Entries: The request entries to be processed in the batch.
+	Entries []*AccountshippingCustomBatchRequestEntry `json:"entries,omitempty"`
+}
+
+type AccountshippingCustomBatchRequestEntry struct {
+	// AccountId: The ID of the account for which to get/update account
+	// shipping settings.
+	AccountId uint64 `json:"accountId,omitempty,string"`
+
+	// AccountShipping: The account shipping settings to update. Only
+	// defined if the method is update.
+	AccountShipping *AccountShipping `json:"accountShipping,omitempty"`
+
+	// BatchId: An entry ID, unique within the batch request.
+	BatchId int64 `json:"batchId,omitempty"`
+
+	// MerchantId: The ID of the managing account.
+	MerchantId uint64 `json:"merchantId,omitempty,string"`
+
+	Method string `json:"method,omitempty"`
+}
+
+type AccountshippingCustomBatchResponse struct {
+	// Entries: The result of the execution of the batch requests.
+	Entries []*AccountshippingCustomBatchResponseEntry `json:"entries,omitempty"`
+
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accountshippingCustomBatchResponse".
+	Kind string `json:"kind,omitempty"`
+}
+
+type AccountshippingCustomBatchResponseEntry struct {
+	// AccountShipping: The retrieved or updated account shipping settings.
+	AccountShipping *AccountShipping `json:"accountShipping,omitempty"`
+
+	// BatchId: The ID of the request entry this entry responds to.
+	BatchId int64 `json:"batchId,omitempty"`
+
+	// Errors: A list of errors defined if and only if the request failed.
+	Errors *Errors `json:"errors,omitempty"`
+
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accountshippingCustomBatchResponseEntry".
+	Kind string `json:"kind,omitempty"`
+}
+
+type AccountshippingListResponse struct {
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accountshippingListResponse".
+	Kind string `json:"kind,omitempty"`
+
+	// NextPageToken: The token for the retrieval of the next page of
+	// account shipping settings.
+	NextPageToken string `json:"nextPageToken,omitempty"`
+
+	Resources []*AccountShipping `json:"resources,omitempty"`
+}
+
 type AccountstatusesCustomBatchRequest struct {
 	// Entries: The request entries to be processed in the batch.
 	Entries []*AccountstatusesCustomBatchRequestEntry `json:"entries,omitempty"`
@@ -360,6 +673,65 @@
 	Resources []*AccountStatus `json:"resources,omitempty"`
 }
 
+type AccounttaxCustomBatchRequest struct {
+	// Entries: The request entries to be processed in the batch.
+	Entries []*AccounttaxCustomBatchRequestEntry `json:"entries,omitempty"`
+}
+
+type AccounttaxCustomBatchRequestEntry struct {
+	// AccountId: The ID of the account for which to get/update account tax
+	// settings.
+	AccountId uint64 `json:"accountId,omitempty,string"`
+
+	// AccountTax: The account tax settings to update. Only defined if the
+	// method is update.
+	AccountTax *AccountTax `json:"accountTax,omitempty"`
+
+	// BatchId: An entry ID, unique within the batch request.
+	BatchId int64 `json:"batchId,omitempty"`
+
+	// MerchantId: The ID of the managing account.
+	MerchantId uint64 `json:"merchantId,omitempty,string"`
+
+	Method string `json:"method,omitempty"`
+}
+
+type AccounttaxCustomBatchResponse struct {
+	// Entries: The result of the execution of the batch requests.
+	Entries []*AccounttaxCustomBatchResponseEntry `json:"entries,omitempty"`
+
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accounttaxCustomBatchResponse".
+	Kind string `json:"kind,omitempty"`
+}
+
+type AccounttaxCustomBatchResponseEntry struct {
+	// AccountTax: The retrieved or updated account tax settings.
+	AccountTax *AccountTax `json:"accountTax,omitempty"`
+
+	// BatchId: The ID of the request entry this entry responds to.
+	BatchId int64 `json:"batchId,omitempty"`
+
+	// Errors: A list of errors defined if and only if the request failed.
+	Errors *Errors `json:"errors,omitempty"`
+
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accounttaxCustomBatchResponseEntry".
+	Kind string `json:"kind,omitempty"`
+}
+
+type AccounttaxListResponse struct {
+	// Kind: Identifies what kind of resource this is. Value: the fixed
+	// string "content#accounttaxListResponse".
+	Kind string `json:"kind,omitempty"`
+
+	// NextPageToken: The token for the retrieval of the next page of
+	// account tax settings.
+	NextPageToken string `json:"nextPageToken,omitempty"`
+
+	Resources []*AccountTax `json:"resources,omitempty"`
+}
+
 type Datafeed struct {
 	// AttributeLanguage: The two-letter ISO 639-1 language in which the
 	// attributes are defined in the data feed.
@@ -785,7 +1157,11 @@
 	// ContentLanguage: The two-letter ISO 639-1 language code for the item.
 	ContentLanguage string `json:"contentLanguage,omitempty"`
 
-	// CustomAttributes: A list of custom (merchant-provided) attributes.
+	// CustomAttributes: A list of custom (merchant-provided) attributes. It
+	// can also be used for submitting any attribute of the feed
+	// specification in its generic form (e.g., { "name": "size type",
+	// "type": "text", "value": "regular" }). This is useful for submitting
+	// attributes not explicitly exposed by the API.
 	CustomAttributes []*ProductCustomAttribute `json:"customAttributes,omitempty"`
 
 	// CustomGroups: A list of custom (merchant-provided) custom attribute
@@ -822,7 +1198,10 @@
 	// directive 2010/30/EU.
 	EnergyEfficiencyClass string `json:"energyEfficiencyClass,omitempty"`
 
-	// ExpirationDate: Date that an item will expire.
+	// ExpirationDate: Date on which the item should expire, as specified
+	// upon insertion. The actual expiration date in Google Shopping is
+	// exposed in productstatuses as googleExpirationDate and might be
+	// earlier if expirationDate is too far in the future.
 	ExpirationDate string `json:"expirationDate,omitempty"`
 
 	// Gender: Target gender of the item.
@@ -948,7 +1327,8 @@
 }
 
 type ProductCustomAttribute struct {
-	// Name: The name of the attribute.
+	// Name: The name of the attribute. Underscores will be replaced by
+	// spaces upon insertion.
 	Name string `json:"name,omitempty"`
 
 	// Type: The type of the attribute.
@@ -966,7 +1346,8 @@
 	// Attributes: The sub-attributes.
 	Attributes []*ProductCustomAttribute `json:"attributes,omitempty"`
 
-	// Name: The name of the group.
+	// Name: The name of the group. Underscores will be replaced by spaces
+	// upon insertion.
 	Name string `json:"name,omitempty"`
 }
 
@@ -1028,6 +1409,9 @@
 }
 
 type ProductStatus struct {
+	// CreationDate: Date on which the item has been created.
+	CreationDate string `json:"creationDate,omitempty"`
+
 	// DataQualityIssues: A list of data quality issues associated with the
 	// product.
 	DataQualityIssues []*ProductStatusDataQualityIssue `json:"dataQualityIssues,omitempty"`
@@ -1035,10 +1419,17 @@
 	// DestinationStatuses: The intended destinations for the product.
 	DestinationStatuses []*ProductStatusDestinationStatus `json:"destinationStatuses,omitempty"`
 
+	// GoogleExpirationDate: Date on which the item expires in Google
+	// Shopping.
+	GoogleExpirationDate string `json:"googleExpirationDate,omitempty"`
+
 	// Kind: Identifies what kind of resource this is. Value: the fixed
 	// string "content#productStatus".
 	Kind string `json:"kind,omitempty"`
 
+	// LastUpdateDate: Date on which the item was last updated.
+	LastUpdateDate string `json:"lastUpdateDate,omitempty"`
+
 	// Link: The link to the product.
 	Link string `json:"link,omitempty"`
 
@@ -1241,6 +1632,14 @@
 	Resources []*ProductStatus `json:"resources,omitempty"`
 }
 
+type Weight struct {
+	// Unit: The weight unit.
+	Unit string `json:"unit,omitempty"`
+
+	// Value: The weight represented as a number.
+	Value string `json:"value,omitempty"`
+}
+
 // method id "content.accounts.custombatch":
 
 type AccountsCustombatchCall struct {
@@ -1878,6 +2277,476 @@
 
 }
 
+// method id "content.accountshipping.custombatch":
+
+type AccountshippingCustombatchCall struct {
+	s                                 *Service
+	accountshippingcustombatchrequest *AccountshippingCustomBatchRequest
+	opt_                              map[string]interface{}
+}
+
+// Custombatch: Retrieves and updates the shipping settings of multiple
+// accounts in a single request.
+func (r *AccountshippingService) Custombatch(accountshippingcustombatchrequest *AccountshippingCustomBatchRequest) *AccountshippingCustombatchCall {
+	c := &AccountshippingCustombatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accountshippingcustombatchrequest = accountshippingcustombatchrequest
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccountshippingCustombatchCall) Fields(s ...googleapi.Field) *AccountshippingCustombatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccountshippingCustombatchCall) Do() (*AccountshippingCustomBatchResponse, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.accountshippingcustombatchrequest)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "accountshipping/batch")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountshippingCustomBatchResponse
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Retrieves and updates the shipping settings of multiple accounts in a single request.",
+	//   "httpMethod": "POST",
+	//   "id": "content.accountshipping.custombatch",
+	//   "path": "accountshipping/batch",
+	//   "request": {
+	//     "$ref": "AccountshippingCustomBatchRequest"
+	//   },
+	//   "response": {
+	//     "$ref": "AccountshippingCustomBatchResponse"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
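As a usage sketch for the batch types defined earlier and the Custombatch call above: assuming svc is a *content.Service built with content.New from an authenticated *http.Client, and assuming "get" as the per-entry Method value (the field is not documented here), fetching the shipping settings of two hypothetical sub-accounts could look like:

	req := &content.AccountshippingCustomBatchRequest{
		Entries: []*content.AccountshippingCustomBatchRequestEntry{
			{BatchId: 1, MerchantId: 100, AccountId: 101, Method: "get"},
			{BatchId: 2, MerchantId: 100, AccountId: 102, Method: "get"},
		},
	}
	resp, err := svc.Accountshipping.Custombatch(req).Do()
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range resp.Entries {
		if e.Errors != nil {
			log.Printf("entry %d failed", e.BatchId)
			continue
		}
		fmt.Printf("account %d: %d carrier rates\n", e.AccountShipping.AccountId, len(e.AccountShipping.CarrierRates))
	}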
+// method id "content.accountshipping.get":
+
+type AccountshippingGetCall struct {
+	s          *Service
+	merchantId uint64
+	accountId  uint64
+	opt_       map[string]interface{}
+}
+
+// Get: Retrieves the shipping settings of the account.
+func (r *AccountshippingService) Get(merchantId uint64, accountId uint64) *AccountshippingGetCall {
+	c := &AccountshippingGetCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	c.accountId = accountId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccountshippingGetCall) Fields(s ...googleapi.Field) *AccountshippingGetCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccountshippingGetCall) Do() (*AccountShipping, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accountshipping/{accountId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+		"accountId":  strconv.FormatUint(c.accountId, 10),
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountShipping
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Retrieves the shipping settings of the account.",
+	//   "httpMethod": "GET",
+	//   "id": "content.accountshipping.get",
+	//   "parameterOrder": [
+	//     "merchantId",
+	//     "accountId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "The ID of the account for which to get/update account shipping settings.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accountshipping/{accountId}",
+	//   "response": {
+	//     "$ref": "AccountShipping"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
+// method id "content.accountshipping.list":
+
+type AccountshippingListCall struct {
+	s          *Service
+	merchantId uint64
+	opt_       map[string]interface{}
+}
+
+// List: Lists the shipping settings of the sub-accounts in your
+// Merchant Center account.
+func (r *AccountshippingService) List(merchantId uint64) *AccountshippingListCall {
+	c := &AccountshippingListCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	return c
+}
+
+// MaxResults sets the optional parameter "maxResults": The maximum
+// number of shipping settings to return in the response, used for
+// paging.
+func (c *AccountshippingListCall) MaxResults(maxResults int64) *AccountshippingListCall {
+	c.opt_["maxResults"] = maxResults
+	return c
+}
+
+// PageToken sets the optional parameter "pageToken": The token returned
+// by the previous request.
+func (c *AccountshippingListCall) PageToken(pageToken string) *AccountshippingListCall {
+	c.opt_["pageToken"] = pageToken
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccountshippingListCall) Fields(s ...googleapi.Field) *AccountshippingListCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccountshippingListCall) Do() (*AccountshippingListResponse, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["maxResults"]; ok {
+		params.Set("maxResults", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["pageToken"]; ok {
+		params.Set("pageToken", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accountshipping")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountshippingListResponse
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Lists the shipping settings of the sub-accounts in your Merchant Center account.",
+	//   "httpMethod": "GET",
+	//   "id": "content.accountshipping.list",
+	//   "parameterOrder": [
+	//     "merchantId"
+	//   ],
+	//   "parameters": {
+	//     "maxResults": {
+	//       "description": "The maximum number of shipping settings to return in the response, used for paging.",
+	//       "format": "uint32",
+	//       "location": "query",
+	//       "type": "integer"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "pageToken": {
+	//       "description": "The token returned by the previous request.",
+	//       "location": "query",
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accountshipping",
+	//   "response": {
+	//     "$ref": "AccountshippingListResponse"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
+// method id "content.accountshipping.patch":
+
+type AccountshippingPatchCall struct {
+	s               *Service
+	merchantId      uint64
+	accountId       uint64
+	accountshipping *AccountShipping
+	opt_            map[string]interface{}
+}
+
+// Patch: Updates the shipping settings of the account. This method
+// supports patch semantics.
+func (r *AccountshippingService) Patch(merchantId uint64, accountId uint64, accountshipping *AccountShipping) *AccountshippingPatchCall {
+	c := &AccountshippingPatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	c.accountId = accountId
+	c.accountshipping = accountshipping
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccountshippingPatchCall) Fields(s ...googleapi.Field) *AccountshippingPatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccountshippingPatchCall) Do() (*AccountShipping, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.accountshipping)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accountshipping/{accountId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PATCH", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+		"accountId":  strconv.FormatUint(c.accountId, 10),
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountShipping
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates the shipping settings of the account. This method supports patch semantics.",
+	//   "httpMethod": "PATCH",
+	//   "id": "content.accountshipping.patch",
+	//   "parameterOrder": [
+	//     "merchantId",
+	//     "accountId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "The ID of the account for which to get/update account shipping settings.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accountshipping/{accountId}",
+	//   "request": {
+	//     "$ref": "AccountShipping"
+	//   },
+	//   "response": {
+	//     "$ref": "AccountShipping"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
+// method id "content.accountshipping.update":
+
+type AccountshippingUpdateCall struct {
+	s               *Service
+	merchantId      uint64
+	accountId       uint64
+	accountshipping *AccountShipping
+	opt_            map[string]interface{}
+}
+
+// Update: Updates the shipping settings of the account.
+func (r *AccountshippingService) Update(merchantId uint64, accountId uint64, accountshipping *AccountShipping) *AccountshippingUpdateCall {
+	c := &AccountshippingUpdateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	c.accountId = accountId
+	c.accountshipping = accountshipping
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccountshippingUpdateCall) Fields(s ...googleapi.Field) *AccountshippingUpdateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccountshippingUpdateCall) Do() (*AccountShipping, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.accountshipping)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accountshipping/{accountId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PUT", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+		"accountId":  strconv.FormatUint(c.accountId, 10),
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountShipping
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates the shipping settings of the account.",
+	//   "httpMethod": "PUT",
+	//   "id": "content.accountshipping.update",
+	//   "parameterOrder": [
+	//     "merchantId",
+	//     "accountId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "The ID of the account for which to get/update account shipping settings.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accountshipping/{accountId}",
+	//   "request": {
+	//     "$ref": "AccountShipping"
+	//   },
+	//   "response": {
+	//     "$ref": "AccountShipping"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
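The non-batch accountshipping methods follow the same builder pattern; a sketch of a read-modify-write on one sub-account, reusing the assumed svc and hypothetical IDs from the batch example:

	shipping, err := svc.Accountshipping.Get(100, 101).Do()
	if err != nil {
		log.Fatal(err)
	}
	shipping.Services = nil // hypothetical edit: drop all shipping services
	if _, err := svc.Accountshipping.Update(100, 101, shipping).Do(); err != nil {
		log.Fatal(err)
	}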
 // method id "content.accountstatuses.custombatch":
 
 type AccountstatusesCustombatchCall struct {
@@ -2147,6 +3016,475 @@
 
 }
 
+// method id "content.accounttax.custombatch":
+
+type AccounttaxCustombatchCall struct {
+	s                            *Service
+	accounttaxcustombatchrequest *AccounttaxCustomBatchRequest
+	opt_                         map[string]interface{}
+}
+
+// Custombatch: Retrieves and updates tax settings of multiple accounts
+// in a single request.
+func (r *AccounttaxService) Custombatch(accounttaxcustombatchrequest *AccounttaxCustomBatchRequest) *AccounttaxCustombatchCall {
+	c := &AccounttaxCustombatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.accounttaxcustombatchrequest = accounttaxcustombatchrequest
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccounttaxCustombatchCall) Fields(s ...googleapi.Field) *AccounttaxCustombatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccounttaxCustombatchCall) Do() (*AccounttaxCustomBatchResponse, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.accounttaxcustombatchrequest)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "accounttax/batch")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccounttaxCustomBatchResponse
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Retrieves and updates tax settings of multiple accounts in a single request.",
+	//   "httpMethod": "POST",
+	//   "id": "content.accounttax.custombatch",
+	//   "path": "accounttax/batch",
+	//   "request": {
+	//     "$ref": "AccounttaxCustomBatchRequest"
+	//   },
+	//   "response": {
+	//     "$ref": "AccounttaxCustomBatchResponse"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
+// method id "content.accounttax.get":
+
+type AccounttaxGetCall struct {
+	s          *Service
+	merchantId uint64
+	accountId  uint64
+	opt_       map[string]interface{}
+}
+
+// Get: Retrieves the tax settings of the account.
+func (r *AccounttaxService) Get(merchantId uint64, accountId uint64) *AccounttaxGetCall {
+	c := &AccounttaxGetCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	c.accountId = accountId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccounttaxGetCall) Fields(s ...googleapi.Field) *AccounttaxGetCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccounttaxGetCall) Do() (*AccountTax, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accounttax/{accountId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+		"accountId":  strconv.FormatUint(c.accountId, 10),
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountTax
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Retrieves the tax settings of the account.",
+	//   "httpMethod": "GET",
+	//   "id": "content.accounttax.get",
+	//   "parameterOrder": [
+	//     "merchantId",
+	//     "accountId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "The ID of the account for which to get/update account tax settings.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accounttax/{accountId}",
+	//   "response": {
+	//     "$ref": "AccountTax"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
+// method id "content.accounttax.list":
+
+type AccounttaxListCall struct {
+	s          *Service
+	merchantId uint64
+	opt_       map[string]interface{}
+}
+
+// List: Lists the tax settings of the sub-accounts in your Merchant
+// Center account.
+func (r *AccounttaxService) List(merchantId uint64) *AccounttaxListCall {
+	c := &AccounttaxListCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	return c
+}
+
+// MaxResults sets the optional parameter "maxResults": The maximum
+// number of tax settings to return in the response, used for paging.
+func (c *AccounttaxListCall) MaxResults(maxResults int64) *AccounttaxListCall {
+	c.opt_["maxResults"] = maxResults
+	return c
+}
+
+// PageToken sets the optional parameter "pageToken": The token returned
+// by the previous request.
+func (c *AccounttaxListCall) PageToken(pageToken string) *AccounttaxListCall {
+	c.opt_["pageToken"] = pageToken
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccounttaxListCall) Fields(s ...googleapi.Field) *AccounttaxListCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccounttaxListCall) Do() (*AccounttaxListResponse, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["maxResults"]; ok {
+		params.Set("maxResults", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["pageToken"]; ok {
+		params.Set("pageToken", fmt.Sprintf("%v", v))
+	}
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accounttax")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccounttaxListResponse
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Lists the tax settings of the sub-accounts in your Merchant Center account.",
+	//   "httpMethod": "GET",
+	//   "id": "content.accounttax.list",
+	//   "parameterOrder": [
+	//     "merchantId"
+	//   ],
+	//   "parameters": {
+	//     "maxResults": {
+	//       "description": "The maximum number of tax settings to return in the response, used for paging.",
+	//       "format": "uint32",
+	//       "location": "query",
+	//       "type": "integer"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "pageToken": {
+	//       "description": "The token returned by the previous request.",
+	//       "location": "query",
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accounttax",
+	//   "response": {
+	//     "$ref": "AccounttaxListResponse"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
+// method id "content.accounttax.patch":
+
+type AccounttaxPatchCall struct {
+	s          *Service
+	merchantId uint64
+	accountId  uint64
+	accounttax *AccountTax
+	opt_       map[string]interface{}
+}
+
+// Patch: Updates the tax settings of the account. This method supports
+// patch semantics.
+func (r *AccounttaxService) Patch(merchantId uint64, accountId uint64, accounttax *AccountTax) *AccounttaxPatchCall {
+	c := &AccounttaxPatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	c.accountId = accountId
+	c.accounttax = accounttax
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccounttaxPatchCall) Fields(s ...googleapi.Field) *AccounttaxPatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccounttaxPatchCall) Do() (*AccountTax, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.accounttax)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accounttax/{accountId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PATCH", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+		"accountId":  strconv.FormatUint(c.accountId, 10),
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountTax
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates the tax settings of the account. This method supports patch semantics.",
+	//   "httpMethod": "PATCH",
+	//   "id": "content.accounttax.patch",
+	//   "parameterOrder": [
+	//     "merchantId",
+	//     "accountId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "The ID of the account for which to get/update account tax settings.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accounttax/{accountId}",
+	//   "request": {
+	//     "$ref": "AccountTax"
+	//   },
+	//   "response": {
+	//     "$ref": "AccountTax"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
+// method id "content.accounttax.update":
+
+type AccounttaxUpdateCall struct {
+	s          *Service
+	merchantId uint64
+	accountId  uint64
+	accounttax *AccountTax
+	opt_       map[string]interface{}
+}
+
+// Update: Updates the tax settings of the account.
+func (r *AccounttaxService) Update(merchantId uint64, accountId uint64, accounttax *AccountTax) *AccounttaxUpdateCall {
+	c := &AccounttaxUpdateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.merchantId = merchantId
+	c.accountId = accountId
+	c.accounttax = accounttax
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AccounttaxUpdateCall) Fields(s ...googleapi.Field) *AccounttaxUpdateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AccounttaxUpdateCall) Do() (*AccountTax, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.accounttax)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "{merchantId}/accounttax/{accountId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PUT", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"merchantId": strconv.FormatUint(c.merchantId, 10),
+		"accountId":  strconv.FormatUint(c.accountId, 10),
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AccountTax
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates the tax settings of the account.",
+	//   "httpMethod": "PUT",
+	//   "id": "content.accounttax.update",
+	//   "parameterOrder": [
+	//     "merchantId",
+	//     "accountId"
+	//   ],
+	//   "parameters": {
+	//     "accountId": {
+	//       "description": "The ID of the account for which to get/update account tax settings.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     },
+	//     "merchantId": {
+	//       "description": "The ID of the managing account.",
+	//       "format": "uint64",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "{merchantId}/accounttax/{accountId}",
+	//   "request": {
+	//     "$ref": "AccountTax"
+	//   },
+	//   "response": {
+	//     "$ref": "AccountTax"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/content"
+	//   ]
+	// }
+
+}
+
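
Taken together, the generated Patch and Update calls serialize the supplied AccountTax with googleapi.WithoutDataWrapper.JSONReader and issue a PATCH or PUT against {merchantId}/accounttax/{accountId}. A minimal caller sketch, assuming the usual generated layout for this package (an Accounttax sub-service hanging off the package's Service); the IDs and the field selector are placeholders, not values from this change:

    import (
        content "google.golang.org/api/content/v2" // import path assumed
    )

    // putAccountTax stores the full tax settings for one sub-account.
    // Patch would do the same over PATCH with patch semantics.
    func putAccountTax(svc *content.Service, merchantID, accountID uint64, tax *content.AccountTax) (*content.AccountTax, error) {
        // Fields("kind") requests a partial response; the selector is illustrative.
        return svc.Accounttax.Update(merchantID, accountID, tax).Fields("kind").Do()
    }
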
 // method id "content.datafeeds.custombatch":
 
 type DatafeedsCustombatchCall struct {
diff --git a/coordinate/v1/coordinate-gen.go b/coordinate/v1/coordinate-gen.go
index 8fa9991..df0f1cc 100644
--- a/coordinate/v1/coordinate-gen.go
+++ b/coordinate/v1/coordinate-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "coordinate:v1"
 const apiName = "coordinate"
diff --git a/customsearch/v1/customsearch-gen.go b/customsearch/v1/customsearch-gen.go
index 7249564..60c3ff5 100644
--- a/customsearch/v1/customsearch-gen.go
+++ b/customsearch/v1/customsearch-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "customsearch:v1"
 const apiName = "customsearch"
diff --git a/dataflow/v1beta3/dataflow-gen.go b/dataflow/v1beta3/dataflow-gen.go
index 65ed334..4f10a23 100644
--- a/dataflow/v1beta3/dataflow-gen.go
+++ b/dataflow/v1beta3/dataflow-gen.go
@@ -12,6 +12,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -31,6 +32,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "dataflow:v1beta3"
 const apiName = "dataflow"
diff --git a/datastore/v1beta1/datastore-gen.go b/datastore/v1beta1/datastore-gen.go
index b395111..495f976 100644
--- a/datastore/v1beta1/datastore-gen.go
+++ b/datastore/v1beta1/datastore-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "datastore:v1beta1"
 const apiName = "datastore"
diff --git a/datastore/v1beta2/datastore-gen.go b/datastore/v1beta2/datastore-gen.go
index 7b76955..c1a5850 100644
--- a/datastore/v1beta2/datastore-gen.go
+++ b/datastore/v1beta2/datastore-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "datastore:v1beta2"
 const apiName = "datastore"
diff --git a/deploymentmanager/v2beta1/deploymentmanager-api.json b/deploymentmanager/v2beta1/deploymentmanager-api.json
index 69e0530..558baa6 100644
--- a/deploymentmanager/v2beta1/deploymentmanager-api.json
+++ b/deploymentmanager/v2beta1/deploymentmanager-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/tIZyL4noDNHqAhtRC9eDpHIzKDI\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/uG7F4p-Yx8ObH2FX1BUUb9Dm9wQ\"",
  "discoveryVersion": "v1",
  "id": "deploymentmanager:v2beta1",
  "name": "deploymentmanager",
  "version": "v2beta1",
- "revision": "20141113",
+ "revision": "20141112",
  "title": "Google Cloud Deployment Manager API V2",
  "description": "The Deployment Manager API allows users to declaratively configure, deploy and run complex solutions on the Google Cloud Platform.",
  "ownerDomain": "google.com",
diff --git a/deploymentmanager/v2beta1/deploymentmanager-gen.go b/deploymentmanager/v2beta1/deploymentmanager-gen.go
index 6744407..89790ba 100644
--- a/deploymentmanager/v2beta1/deploymentmanager-gen.go
+++ b/deploymentmanager/v2beta1/deploymentmanager-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "deploymentmanager:v2beta1"
 const apiName = "deploymentmanager"
diff --git a/dfareporting/v1.1/dfareporting-gen.go b/dfareporting/v1.1/dfareporting-gen.go
index 8377d0c..adcc259 100644
--- a/dfareporting/v1.1/dfareporting-gen.go
+++ b/dfareporting/v1.1/dfareporting-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "dfareporting:v1.1"
 const apiName = "dfareporting"
diff --git a/dfareporting/v1.2/dfareporting-gen.go b/dfareporting/v1.2/dfareporting-gen.go
index 2fa4533..7edb783 100644
--- a/dfareporting/v1.2/dfareporting-gen.go
+++ b/dfareporting/v1.2/dfareporting-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "dfareporting:v1.2"
 const apiName = "dfareporting"
diff --git a/dfareporting/v1.3/dfareporting-gen.go b/dfareporting/v1.3/dfareporting-gen.go
index 965721c..638d912 100644
--- a/dfareporting/v1.3/dfareporting-gen.go
+++ b/dfareporting/v1.3/dfareporting-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "dfareporting:v1.3"
 const apiName = "dfareporting"
diff --git a/dfareporting/v1/dfareporting-gen.go b/dfareporting/v1/dfareporting-gen.go
index 7ba9c0b..68084ed 100644
--- a/dfareporting/v1/dfareporting-gen.go
+++ b/dfareporting/v1/dfareporting-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "dfareporting:v1"
 const apiName = "dfareporting"
diff --git a/dfareporting/v2.0/dfareporting-gen.go b/dfareporting/v2.0/dfareporting-gen.go
index a0fbdc9..b39d7e4 100644
--- a/dfareporting/v2.0/dfareporting-gen.go
+++ b/dfareporting/v2.0/dfareporting-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "dfareporting:v2.0"
 const apiName = "dfareporting"
@@ -11391,6 +11393,10 @@
 	creativeassetmetadata *CreativeAssetMetadata
 	opt_                  map[string]interface{}
 	media_                io.Reader
+	resumable_            googleapi.SizeReaderAt
+	mediaType_            string
+	ctx_                  context.Context
+	protocol_             string
 }
 
 // Insert: Inserts a new creative asset.
@@ -11401,8 +11407,32 @@
 	c.creativeassetmetadata = creativeassetmetadata
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *CreativeAssetsInsertCall) Media(r io.Reader) *CreativeAssetsInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *CreativeAssetsInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *CreativeAssetsInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *CreativeAssetsInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *CreativeAssetsInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -11427,21 +11457,43 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "userprofiles/{profileId}/creativeAssets/{advertiserId}/creativeAssets")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"profileId":    strconv.FormatInt(c.profileId, 10),
 		"advertiserId": strconv.FormatInt(c.advertiserId, 10),
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -11451,6 +11503,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *CreativeAssetMetadata
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
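
With chunked request bodies, the single-request path no longer needs to know the media size up front: Media hands an arbitrary io.Reader to googleapi.ConditionallyIncludeMedia and the bytes stream out with uploadType=multipart, while ResumableMedia switches Do over to the uploadType=resumable handshake driven by googleapi.ResumableUpload. A hedged sketch of the single-chunk path, assuming a *dfareporting.Service and placeholder IDs and metadata:

    import (
        "os"

        dfareporting "google.golang.org/api/dfareporting/v2.0" // import path assumed
    )

    func uploadAsset(svc *dfareporting.Service, profileID, advertiserID int64,
        meta *dfareporting.CreativeAssetMetadata, path string) (*dfareporting.CreativeAssetMetadata, error) {

        f, err := os.Open(path)
        if err != nil {
            return nil, err
        }
        defer f.Close()
        // Media streams f as the media part of the multipart body;
        // no Content-Length (and hence no size sniffing) is required.
        return svc.CreativeAssets.Insert(profileID, advertiserID, meta).Media(f).Do()
    }
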
diff --git a/discovery/v1/discovery-gen.go b/discovery/v1/discovery-gen.go
index 6810eaf..1ff39cb 100644
--- a/discovery/v1/discovery-gen.go
+++ b/discovery/v1/discovery-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "discovery:v1"
 const apiName = "discovery"
diff --git a/dns/v1beta1/dns-gen.go b/dns/v1beta1/dns-gen.go
index 9f8ea36..cf80a29 100644
--- a/dns/v1beta1/dns-gen.go
+++ b/dns/v1beta1/dns-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "dns:v1beta1"
 const apiName = "dns"
diff --git a/doubleclickbidmanager/v1/doubleclickbidmanager-gen.go b/doubleclickbidmanager/v1/doubleclickbidmanager-gen.go
index c6e0462..df0b90a 100644
--- a/doubleclickbidmanager/v1/doubleclickbidmanager-gen.go
+++ b/doubleclickbidmanager/v1/doubleclickbidmanager-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "doubleclickbidmanager:v1"
 const apiName = "doubleclickbidmanager"
diff --git a/doubleclicksearch/v2/doubleclicksearch-api.json b/doubleclicksearch/v2/doubleclicksearch-api.json
index b46d642..9b69515 100644
--- a/doubleclicksearch/v2/doubleclicksearch-api.json
+++ b/doubleclicksearch/v2/doubleclicksearch-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/1ZhpxHDNubbx8bryhI0Ks6XcxaA\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/cxvPCR_xcZiPo49FRoYXODxx8IM\"",
  "discoveryVersion": "v1",
  "id": "doubleclicksearch:v2",
  "name": "doubleclicksearch",
  "version": "v2",
- "revision": "20141113",
+ "revision": "20141222",
  "title": "DoubleClick Search API",
  "description": "Report and modify your advertising data in DoubleClick Search (for example, campaigns, ad groups, keywords, and conversions).",
  "ownerDomain": "google.com",
diff --git a/doubleclicksearch/v2/doubleclicksearch-gen.go b/doubleclicksearch/v2/doubleclicksearch-gen.go
index c191a56..d512dcc 100644
--- a/doubleclicksearch/v2/doubleclicksearch-gen.go
+++ b/doubleclicksearch/v2/doubleclicksearch-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "doubleclicksearch:v2"
 const apiName = "doubleclicksearch"
diff --git a/drive/v1/drive-api.json b/drive/v1/drive-api.json
index 1af95c4..95436ff 100644
--- a/drive/v1/drive-api.json
+++ b/drive/v1/drive-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/xtpwrL4RwrHHosQq_H1kzUTVEH8\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/L-3fUMUnxAbeUiUzcuNcz8B5ukA\"",
  "discoveryVersion": "v1",
  "id": "drive:v1",
  "name": "drive",
  "version": "v1",
- "revision": "20141201",
+ "revision": "20141212",
  "title": "Drive API",
  "description": "The API to interact with Drive.",
  "ownerDomain": "google.com",
diff --git a/drive/v1/drive-gen.go b/drive/v1/drive-gen.go
index 2e57124..dbc3134 100644
--- a/drive/v1/drive-gen.go
+++ b/drive/v1/drive-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "drive:v1"
 const apiName = "drive"
@@ -307,10 +309,14 @@
 // method id "drive.files.insert":
 
 type FilesInsertCall struct {
-	s      *Service
-	file   *File
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	file       *File
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Inserts a file, and any settable metadata or blob content
@@ -320,8 +326,32 @@
 	c.file = file
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *FilesInsertCall) Media(r io.Reader) *FilesInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *FilesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *FilesInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *FilesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *FilesInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -346,18 +376,40 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "files")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.SetOpaque(req.URL)
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -367,6 +419,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *File
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -553,11 +620,15 @@
 // method id "drive.files.update":
 
 type FilesUpdateCall struct {
-	s      *Service
-	id     string
-	file   *File
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	id         string
+	file       *File
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Update: Updates file metadata and/or content
@@ -595,8 +666,32 @@
 	c.opt_["updateViewedDate"] = updateViewedDate
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *FilesUpdateCall) Media(r io.Reader) *FilesUpdateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *FilesUpdateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *FilesUpdateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *FilesUpdateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *FilesUpdateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -630,20 +725,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "files/{id}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("PUT", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"id": c.id,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -653,6 +770,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *File
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
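
The resumable variant needs an io.ReaderAt plus the total size (Do wraps them in an io.SectionReader), sends the initial POST with uploadType=resumable and X-Upload-Content-Type, then lets googleapi.ResumableUpload push the bytes and invoke the ProgressUpdater after every chunk. A sketch against drive/v1, assuming a *drive.Service; note that the generated Do above also refuses to start a resumable session unless a "name" query parameter is present:

    import (
        "log"
        "os"

        "golang.org/x/net/context"
        drive "google.golang.org/api/drive/v1"
    )

    func resumableInsert(svc *drive.Service, path string) (*drive.File, error) {
        f, err := os.Open(path)
        if err != nil {
            return nil, err
        }
        defer f.Close()
        fi, err := f.Stat()
        if err != nil {
            return nil, err
        }
        // *os.File is an io.ReaderAt; passing "" as mediaType lets
        // googleapi.DetectMediaType sniff the MIME type. The context allows
        // the chunked upload to be cancelled midway.
        call := svc.Files.Insert(&drive.File{}).
            ResumableMedia(context.Background(), f, fi.Size(), "").
            ProgressUpdater(func(current, total int64) {
                log.Printf("uploaded %d of %d bytes", current, total)
            })
        return call.Do()
    }
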
diff --git a/drive/v2/drive-api.json b/drive/v2/drive-api.json
index a266809..3e9e579 100644
--- a/drive/v2/drive-api.json
+++ b/drive/v2/drive-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/AHEGWruKyBAhYtcPNc0aloAxe6E\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Tndtd0xUqwZ7nEywvF_TXbq6aYo\"",
  "discoveryVersion": "v1",
  "id": "drive:v2",
  "name": "drive",
  "version": "v2",
- "revision": "20141201",
+ "revision": "20141212",
  "title": "Drive API",
  "description": "The API to interact with Drive.",
  "ownerDomain": "google.com",
@@ -178,6 +178,13 @@
       }
      }
     },
+    "folderColorPalette": {
+     "type": "array",
+     "description": "The palette of allowable folder colors as RGB hex strings.",
+     "items": {
+      "type": "string"
+     }
+    },
     "importFormats": {
      "type": "array",
      "description": "The allowable import formats.",
@@ -935,6 +942,10 @@
      "description": "The size of the file in bytes. This is only populated for files with content stored in Drive.",
      "format": "int64"
     },
+    "folderColorRgb": {
+     "type": "string",
+     "description": "Folder color as an RGB hex string if the file is a folder. The list of supported colors is available in the folderColorPalette field of the About resource. If an unsupported color is specified, it will be changed to the closest color in the palette."
+    },
     "headRevisionId": {
      "type": "string",
      "description": "The ID of the file's head revision. This will only be populated for files with content stored in Drive."
diff --git a/drive/v2/drive-gen.go b/drive/v2/drive-gen.go
index 69fed39..aeefa74 100644
--- a/drive/v2/drive-gen.go
+++ b/drive/v2/drive-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "drive:v2"
 const apiName = "drive"
@@ -250,6 +252,10 @@
 	// Features: List of additional features enabled on this account.
 	Features []*AboutFeatures `json:"features,omitempty"`
 
+	// FolderColorPalette: The palette of allowable folder colors as RGB hex
+	// strings.
+	FolderColorPalette []string `json:"folderColorPalette,omitempty"`
+
 	// ImportFormats: The allowable import formats.
 	ImportFormats []*AboutImportFormats `json:"importFormats,omitempty"`
 
@@ -810,6 +816,13 @@
 	// files with content stored in Drive.
 	FileSize int64 `json:"fileSize,omitempty,string"`
 
+	// FolderColorRgb: Folder color as an RGB hex string if the file is a
+	// folder. The list of supported colors is available in the
+	// folderColorPalette field of the About resource. If an unsupported
+	// color is specified, it will be changed to the closest color in the
+	// palette.
+	FolderColorRgb string `json:"folderColorRgb,omitempty"`
+
 	// HeadRevisionId: The ID of the file's head revision. This will only be
 	// populated for files with content stored in Drive.
 	HeadRevisionId string `json:"headRevisionId,omitempty"`
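
FolderColorRgb rounds out the folder-color support: valid values come from About.FolderColorPalette, and an unsupported value is snapped to the closest palette entry by the server. A hedged sketch creating a colored folder through the v2 client; the Files/About sub-service names and the Title/MimeType fields are the usual Drive v2 ones, assumed rather than shown in this diff:

    import (
        drive "google.golang.org/api/drive/v2"
    )

    func createColoredFolder(svc *drive.Service) (*drive.File, error) {
        // Prefer a color from the advertised palette; the literal is only a
        // fallback and would be remapped to the closest supported color anyway.
        color := "#ff7537" // illustrative
        if about, err := svc.About.Get().Do(); err == nil && len(about.FolderColorPalette) > 0 {
            color = about.FolderColorPalette[0]
        }
        folder := &drive.File{
            Title:          "Reports",
            MimeType:       "application/vnd.google-apps.folder", // marks the file as a folder
            FolderColorRgb: color,
        }
        return svc.Files.Insert(folder).Do()
    }
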
@@ -3630,10 +3643,14 @@
 // method id "drive.files.insert":
 
 type FilesInsertCall struct {
-	s      *Service
-	file   *File
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	file       *File
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Insert a new file.
@@ -3700,8 +3717,32 @@
 	c.opt_["visibility"] = visibility
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *FilesInsertCall) Media(r io.Reader) *FilesInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *FilesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *FilesInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *FilesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *FilesInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -3750,18 +3791,40 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "files")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.SetOpaque(req.URL)
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -3771,6 +3834,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *File
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -4554,11 +4632,15 @@
 // method id "drive.files.update":
 
 type FilesUpdateCall struct {
-	s      *Service
-	fileId string
-	file   *File
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	fileId     string
+	file       *File
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Update: Updates file metadata and/or content.
@@ -4656,8 +4738,32 @@
 	c.opt_["useContentAsIndexableText"] = useContentAsIndexableText
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *FilesUpdateCall) Media(r io.Reader) *FilesUpdateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *FilesUpdateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *FilesUpdateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *FilesUpdateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *FilesUpdateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -4718,20 +4824,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "files/{fileId}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("PUT", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"fileId": c.fileId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -4741,6 +4869,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *File
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -6731,10 +6874,14 @@
 // method id "drive.realtime.update":
 
 type RealtimeUpdateCall struct {
-	s      *Service
-	fileId string
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	fileId     string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Update: Overwrites the Realtime API data model associated with this
@@ -6755,8 +6902,32 @@
 	c.opt_["baseRevision"] = baseRevision
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *RealtimeUpdateCall) Media(r io.Reader) *RealtimeUpdateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *RealtimeUpdateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *RealtimeUpdateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *RealtimeUpdateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *RealtimeUpdateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -6779,22 +6950,44 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "files/{fileId}/realtime")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("PUT", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"fileId": c.fileId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -6804,6 +6997,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return err
+		}
+	}
 	return nil
 	// {
 	//   "description": "Overwrites the Realtime API data model associated with this file with the provided JSON data model.",
diff --git a/fitness/v1/fitness-gen.go b/fitness/v1/fitness-gen.go
index 35c9c73..5001194 100644
--- a/fitness/v1/fitness-gen.go
+++ b/fitness/v1/fitness-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "fitness:v1"
 const apiName = "fitness"
diff --git a/freebase/v1-sandbox/freebase-gen.go b/freebase/v1-sandbox/freebase-gen.go
index c803c90..37a5ec0 100644
--- a/freebase/v1-sandbox/freebase-gen.go
+++ b/freebase/v1-sandbox/freebase-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "freebase:v1-sandbox"
 const apiName = "freebase"
diff --git a/freebase/v1/freebase-gen.go b/freebase/v1/freebase-gen.go
index eeb89b6..5dc866b 100644
--- a/freebase/v1/freebase-gen.go
+++ b/freebase/v1/freebase-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "freebase:v1"
 const apiName = "freebase"
diff --git a/freebase/v1sandbox/freebase-gen.go b/freebase/v1sandbox/freebase-gen.go
index 48edc5b..ed1a783 100644
--- a/freebase/v1sandbox/freebase-gen.go
+++ b/freebase/v1sandbox/freebase-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "freebase:v1sandbox"
 const apiName = "freebase"
diff --git a/games/v1/games-api.json b/games/v1/games-api.json
index f74b596..f3c3449 100644
--- a/games/v1/games-api.json
+++ b/games/v1/games-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/hKogzlFCeoQ2aH_L_DGXt68-_r4\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Lk2G3wqb0mjcAChQC6sL6CdXNgs\"",
  "discoveryVersion": "v1",
  "id": "games:v1",
  "name": "games",
  "canonicalName": "Games",
  "version": "v1",
- "revision": "20150105",
+ "revision": "20150119",
  "title": "Google Play Game Services API",
  "description": "The API for Google Play Game Services.",
  "ownerDomain": "google.com",
diff --git a/games/v1/games-gen.go b/games/v1/games-gen.go
index 70aa6f9..88a1fea 100644
--- a/games/v1/games-gen.go
+++ b/games/v1/games-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "games:v1"
 const apiName = "games"
diff --git a/gamesconfiguration/v1configuration/gamesconfiguration-api.json b/gamesconfiguration/v1configuration/gamesconfiguration-api.json
index dceebc5..eb48c03 100644
--- a/gamesconfiguration/v1configuration/gamesconfiguration-api.json
+++ b/gamesconfiguration/v1configuration/gamesconfiguration-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/K5Ih1bL_RaiKMVVYQoDzwVBhmUA\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Jkr0IKioflJu_aUgxoI27vUdCqY\"",
  "discoveryVersion": "v1",
  "id": "gamesConfiguration:v1configuration",
  "name": "gamesConfiguration",
  "canonicalName": "Games Configuration",
  "version": "v1configuration",
- "revision": "20150105",
+ "revision": "20150119",
  "title": "Google Play Game Services Publishing API",
  "description": "The Publishing API for Google Play Game Services.",
  "ownerDomain": "google.com",
diff --git a/gamesconfiguration/v1configuration/gamesconfiguration-gen.go b/gamesconfiguration/v1configuration/gamesconfiguration-gen.go
index da77910..8a33c47 100644
--- a/gamesconfiguration/v1configuration/gamesconfiguration-gen.go
+++ b/gamesconfiguration/v1configuration/gamesconfiguration-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "gamesConfiguration:v1configuration"
 const apiName = "gamesConfiguration"
@@ -852,6 +854,10 @@
 	imageType  string
 	opt_       map[string]interface{}
 	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Upload: Uploads an image for a resource with the given ID and image
@@ -862,8 +868,32 @@
 	c.imageType = imageType
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ImageConfigurationsUploadCall) Media(r io.Reader) *ImageConfigurationsUploadCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ImageConfigurationsUploadCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ImageConfigurationsUploadCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ImageConfigurationsUploadCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ImageConfigurationsUploadCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -883,23 +913,45 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "images/{resourceId}/imageType/{imageType}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"resourceId": c.resourceId,
 		"imageType":  c.imageType,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -909,6 +961,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *ImageConfiguration
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
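
The gamesConfiguration image upload follows the same pattern, just keyed by a resource ID and image type instead of a metadata body. Assumed here: a *gamesconfiguration.Service with an ImageConfigurations sub-service; imageType must be one of the API's image-type values, left as a parameter in this sketch:

    import (
        "os"

        gamesconfiguration "google.golang.org/api/gamesconfiguration/v1configuration" // import path assumed
    )

    func uploadImage(svc *gamesconfiguration.Service, resourceID, imageType, path string) (*gamesconfiguration.ImageConfiguration, error) {
        f, err := os.Open(path)
        if err != nil {
            return nil, err
        }
        defer f.Close()
        // Single-chunk multipart upload: the image bytes are streamed, so no
        // size has to be computed before the request is sent.
        return svc.ImageConfigurations.Upload(resourceID, imageType).Media(f).Do()
    }
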
diff --git a/gamesmanagement/v1management/gamesmanagement-api.json b/gamesmanagement/v1management/gamesmanagement-api.json
index d54c2ac..2dc4549 100644
--- a/gamesmanagement/v1management/gamesmanagement-api.json
+++ b/gamesmanagement/v1management/gamesmanagement-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/8gQ_PlllzRxle0RG1v4iwWqEmU4\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/6aiv7cDwqgyYEVRSZQkoMExBeXA\"",
  "discoveryVersion": "v1",
  "id": "gamesManagement:v1management",
  "name": "gamesManagement",
  "canonicalName": "Games Management",
  "version": "v1management",
- "revision": "20150105",
+ "revision": "20150119",
  "title": "Google Play Game Services Management API",
  "description": "The Management API for Google Play Game Services.",
  "ownerDomain": "google.com",
diff --git a/gamesmanagement/v1management/gamesmanagement-gen.go b/gamesmanagement/v1management/gamesmanagement-gen.go
index 09b6928..16f8346 100644
--- a/gamesmanagement/v1management/gamesmanagement-gen.go
+++ b/gamesmanagement/v1management/gamesmanagement-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "gamesManagement:v1management"
 const apiName = "gamesManagement"
diff --git a/gan/v1beta1/gan-gen.go b/gan/v1beta1/gan-gen.go
index 79ebb15..b84801d 100644
--- a/gan/v1beta1/gan-gen.go
+++ b/gan/v1beta1/gan-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "gan:v1beta1"
 const apiName = "gan"
diff --git a/genomics/v1beta/genomics-api.json b/genomics/v1beta/genomics-api.json
index 7d30cba..154af82 100644
--- a/genomics/v1beta/genomics-api.json
+++ b/genomics/v1beta/genomics-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/6ml16nkfiebLShEb_izFUqhiTAg\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/bPDqLaAkd2IdqCX0_JVWgMmdl14\"",
  "discoveryVersion": "v1",
  "id": "genomics:v1beta",
  "name": "genomics",
  "version": "v1beta",
- "revision": "20150109",
+ "revision": "20150106",
  "title": "Genomics API",
  "description": "Provides access to Genomics data.",
  "ownerDomain": "google.com",
diff --git a/genomics/v1beta/genomics-gen.go b/genomics/v1beta/genomics-gen.go
index 419a97e..83f74fb 100644
--- a/genomics/v1beta/genomics-gen.go
+++ b/genomics/v1beta/genomics-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "genomics:v1beta"
 const apiName = "genomics"
diff --git a/genomics/v1beta2/genomics-api.json b/genomics/v1beta2/genomics-api.json
index 7e953ec..82c8f1f 100644
--- a/genomics/v1beta2/genomics-api.json
+++ b/genomics/v1beta2/genomics-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/bWh0LiO6dtNe4IUMpiPkMfyIZWI\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Z_eqiuigpQLBbFfU2_Hg5TLtnJc\"",
  "discoveryVersion": "v1",
  "id": "genomics:v1beta2",
  "name": "genomics",
  "version": "v1beta2",
- "revision": "20150109",
+ "revision": "20150106",
  "title": "Genomics API",
  "description": "Provides access to Genomics data.",
  "ownerDomain": "google.com",
@@ -93,7 +93,7 @@
    "properties": {
     "bamSourceUris": {
      "type": "array",
-     "description": "The BAM source files for alignment. Exactly one of readGroupSetIds, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided. The caller must have READ permissions for these files.",
+     "description": "The BAM source files for alignment. Exactly one of readGroupSetId, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided. The caller must have READ permissions for these files.",
      "items": {
       "type": "string"
      }
@@ -104,18 +104,15 @@
     },
     "interleavedFastqSource": {
      "$ref": "InterleavedFastqSource",
-     "description": "The interleaved FASTQ source files for alignment, where both members of each pair of reads are found on consecutive records within the same FASTQ file. Exactly one of readGroupSetIds, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided."
+     "description": "The interleaved FASTQ source files for alignment, where both members of each pair of reads are found on consecutive records within the same FASTQ file. Exactly one of readGroupSetId, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided."
     },
     "pairedFastqSource": {
      "$ref": "PairedFastqSource",
-     "description": "The paired end FASTQ source files for alignment, where each member of a pair of reads are found in separate files. Exactly one of readGroupSetIds, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided."
+     "description": "The paired end FASTQ source files for alignment, where each member of a pair of reads are found in separate files. Exactly one of readGroupSetId, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided."
     },
-    "readGroupSetIds": {
-     "type": "array",
-     "description": "The IDs of the read group sets which will be aligned. New read group sets will be generated to hold the aligned data, the originals will not be modified. The caller must have READ permissions for these read group sets. Exactly one of readGroupSetIds, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided.",
-     "items": {
-      "type": "string"
-     }
+    "readGroupSetId": {
+     "type": "string",
+     "description": "The ID of the read group set which will be aligned. A new read group set will be generated to hold the aligned data, the originals will not be modified. The caller must have READ permissions for this read group set. Exactly one of readGroupSetId, bamSourceUris, interleavedFastqSource or pairedFastqSource must be provided."
     }
    }
   },
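
The align and call request schemas now take a single readGroupSetId string instead of a readGroupSetIds array, so the regenerated Go structs will carry a plain string field. A hedged sketch of the implied shape; the request type name and field names are inferred from the schema above, not shown in this hunk:

    import genomics "google.golang.org/api/genomics/v1beta2" // import path assumed

    // Inferred shape only; the concrete struct is emitted into genomics-gen.go.
    var alignReq = &genomics.AlignReadGroupSetsRequest{
        // Exactly one of ReadGroupSetId, BamSourceUris, InterleavedFastqSource
        // or PairedFastqSource may be set; ReadGroupSetId is now a single string.
        ReadGroupSetId: "CMvnhpKTFhDnk4_9zcKO3_YB", // hypothetical read group set ID
    }
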
@@ -131,6 +128,120 @@
     }
    }
   },
+  "Annotation": {
+   "id": "Annotation",
+   "type": "object",
+   "description": "An annotation describes a region of reference genome. The value of an annotation may be one of several canonical types, supplemented by arbitrary info tags. A variant annotation is represented by one or more of these canonical types. An annotation is not inherently associated with a specific sample/individual (though a client could choose to use annotations in this way). Example canonical annotation types are 'Gene' and 'Variant'.",
+   "externalTypeName": "cloud.genomics.annotations.Annotation",
+   "properties": {
+    "annotationSetId": {
+     "type": "string",
+     "description": "The ID of the containing annotation set."
+    },
+    "id": {
+     "type": "string",
+     "description": "The generated unique ID for this annotation."
+    },
+    "info": {
+     "type": "object",
+     "description": "A map of additional data for this annotation.",
+     "additionalProperties": {
+      "type": "array",
+      "description": "A string which maps to an array of values.",
+      "items": {
+       "type": "string"
+      }
+     }
+    },
+    "name": {
+     "type": "string",
+     "description": "The display name of this annotation."
+    },
+    "position": {
+     "$ref": "RangePosition",
+     "description": "The position of this annotation on the reference sequence."
+    },
+    "transcript": {
+     "$ref": "Transcript",
+     "description": "A transcript value represents the assertion that a particular region of the reference genome may be transcribed as RNA. An alternate splicing pattern would be represented as separate transcript object. This field is only set for annotations of type TRANSCRIPT."
+    },
+    "type": {
+     "type": "string",
+     "description": "The data type for this annotation. Must match the containing annotation set's type.",
+     "enum": [
+      "GENE",
+      "GENERIC",
+      "TRANSCRIPT",
+      "VARIANT"
+     ],
+     "enumDescriptions": [
+      "",
+      "",
+      "",
+      ""
+     ]
+    },
+    "variant": {
+     "$ref": "VariantAnnotation",
+     "description": "A variant annotation which describes the effect of a variant on the genome, the coding sequence, and/or higher level consequences at the organism level e.g. pathogenicity. This field is only set for annotations of type VARIANT."
+    }
+   }
+  },
+  "AnnotationSet": {
+   "id": "AnnotationSet",
+   "type": "object",
+   "description": "An annotation set is a logical grouping of annotations which share consistent type information and provenance. An example would be 'all genes from refseq', or 'all variant annotations from ClinVar'.",
+   "externalTypeName": "cloud.genomics.annotations.AnnotationSet",
+   "properties": {
+    "datasetId": {
+     "type": "string",
+     "description": "The ID of the containing dataset."
+    },
+    "id": {
+     "type": "string",
+     "description": "The generated unique ID for this annotation set."
+    },
+    "info": {
+     "type": "object",
+     "description": "A map of additional data for this annotation set.",
+     "additionalProperties": {
+      "type": "array",
+      "description": "A string which maps to an array of values.",
+      "items": {
+       "type": "string"
+      }
+     }
+    },
+    "name": {
+     "type": "string",
+     "description": "The display name for this annotation set."
+    },
+    "referenceSetId": {
+     "type": "string",
+     "description": "The ID of the reference set which defines the coordinate-space for this set's annotations."
+    },
+    "sourceUri": {
+     "type": "string",
+     "description": "The source URI describing the file from which this annotation set was generated, if any."
+    },
+    "type": {
+     "type": "string",
+     "description": "The type of annotations contained within this set.",
+     "enum": [
+      "GENE",
+      "GENERIC",
+      "TRANSCRIPT",
+      "VARIANT"
+     ],
+     "enumDescriptions": [
+      "",
+      "",
+      "",
+      ""
+     ]
+    }
+   }
+  },
   "Call": {
    "id": "Call",
    "type": "object",
@@ -188,16 +299,13 @@
      "type": "string",
      "description": "Required. The ID of the dataset the called variants will belong to. The caller must have WRITE permissions to this dataset."
     },
-    "readGroupSetIds": {
-     "type": "array",
-     "description": "The IDs of the read group sets which will be called. The caller must have READ permissions for these read group sets. One of readGroupSetIds or sourceUris must be provided.",
-     "items": {
-      "type": "string"
-     }
+    "readGroupSetId": {
+     "type": "string",
+     "description": "The IDs of the read group sets which will be called. The caller must have READ permissions for these read group sets. One of readGroupSetId or sourceUris must be provided."
     },
     "sourceUris": {
      "type": "array",
-     "description": "A list of URIs pointing at BAM files in Google Cloud Storage which will be called. FASTQ files are not allowed. The caller must have READ permissions for these files. One of readGroupSetIds or sourceUris must be provided.",
+     "description": "A list of URIs pointing at BAM files in Google Cloud Storage which will be called. FASTQ files are not allowed. The caller must have READ permissions for these files. One of readGroupSetId or sourceUris must be provided.",
      "items": {
       "type": "string"
      }
@@ -487,6 +595,22 @@
     }
    }
   },
+  "ExternalId": {
+   "id": "ExternalId",
+   "type": "object",
+   "description": "",
+   "externalTypeName": "cloud.genomics.annotations.ExternalId",
+   "properties": {
+    "id": {
+     "type": "string",
+     "description": "The id used by the source of this data."
+    },
+    "sourceName": {
+     "type": "string",
+     "description": "The name of the source of this data."
+    }
+   }
+  },
   "FastqMetadata": {
    "id": "FastqMetadata",
    "type": "object",
@@ -600,6 +724,19 @@
     }
    }
   },
+  "Int32Value": {
+   "id": "Int32Value",
+   "type": "object",
+   "description": "Wrapper message for int32.",
+   "externalTypeName": "google.protobuf.Int32Value",
+   "properties": {
+    "value": {
+     "type": "integer",
+     "description": "The int32 value.",
+     "format": "int32"
+    }
+   }
+  },
   "InterleavedFastqSource": {
    "id": "InterleavedFastqSource",
    "type": "object",
@@ -944,6 +1081,32 @@
     }
    }
   },
+  "QueryRange": {
+   "id": "QueryRange",
+   "type": "object",
+   "description": "A 0-based half-open genomic coordinate range for search requests.",
+   "externalTypeName": "cloud.genomics.common3.QueryRange",
+   "properties": {
+    "end": {
+     "type": "string",
+     "description": "The end position of the range on the reference, 0-based exclusive. If specified, referenceId or referenceName must also be specified. If unset or 0, defaults to the length of the reference.",
+     "format": "int64"
+    },
+    "referenceId": {
+     "type": "string",
+     "description": "The ID of the reference to query. At most one of referenceId and referenceName should be specified."
+    },
+    "referenceName": {
+     "type": "string",
+     "description": "The name of the reference to query, within the reference set associated with this query. At most one of referenceId and referenceName pshould be specified."
+    },
+    "start": {
+     "type": "string",
+     "description": "The start position of the range on the reference, 0-based inclusive. If specified, referenceId or referenceName must also be specified. Defaults to 0.",
+     "format": "int64"
+    }
+   }
+  },
   "Range": {
    "id": "Range",
    "type": "object",
@@ -966,6 +1129,36 @@
     }
    }
   },
+  "RangePosition": {
+   "id": "RangePosition",
+   "type": "object",
+   "description": "A 0-based half-open genomic coordinate range over a reference sequence, for representing the position of a genomic resource.",
+   "externalTypeName": "cloud.genomics.common3.RangePosition",
+   "properties": {
+    "end": {
+     "type": "string",
+     "description": "The end position of the range on the reference, 0-based exclusive.",
+     "format": "int64"
+    },
+    "referenceId": {
+     "type": "string",
+     "description": "The ID of the Google Genomics reference associated with this range."
+    },
+    "referenceName": {
+     "type": "string",
+     "description": "The display name corresponding to the reference specified by referenceId, for example chr1, 1, or chrX."
+    },
+    "reverseStrand": {
+     "type": "boolean",
+     "description": "Whether this range refers to the reverse strand, as opposed to the forward strand. Note that regardless of this field, the start/end position of the range always refer to the forward strand."
+    },
+    "start": {
+     "type": "string",
+     "description": "The start position of the range on the reference, 0-based inclusive.",
+     "format": "int64"
+    }
+   }
+  },
   "Read": {
    "id": "Read",
    "type": "object",
@@ -1316,6 +1509,119 @@
     }
    }
   },
+  "SearchAnnotationSetsRequest": {
+   "id": "SearchAnnotationSetsRequest",
+   "type": "object",
+   "externalTypeName": "cloud.genomics.annotations.SearchAnnotationSetsRequest",
+   "properties": {
+    "datasetIds": {
+     "type": "array",
+     "description": "The dataset IDs to search within. Caller must have READ access to these datasets.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "name": {
+     "type": "string",
+     "description": "Only return annotations sets for which a substring of the name matches this string (case insensitive)."
+    },
+    "pageSize": {
+     "type": "integer",
+     "description": "Specifies number of results to return in a single page. If unspecified, it will default to 128. The maximum value is 1024.",
+     "format": "int32"
+    },
+    "pageToken": {
+     "type": "string",
+     "description": "The continuation token, which is used to page through large result sets. To get the next page of results, set this parameter to the value of nextPageToken from the previous response."
+    },
+    "referenceSetId": {
+     "type": "string",
+     "description": "If specified, only annotation sets associated with the given reference set are returned."
+    },
+    "types": {
+     "type": "array",
+     "description": "If specified, only annotation sets which have any of these types are returned.",
+     "items": {
+      "type": "string",
+      "enum": [
+       "GENE",
+       "GENERIC",
+       "TRANSCRIPT",
+       "VARIANT"
+      ],
+      "enumDescriptions": [
+       "",
+       "",
+       "",
+       ""
+      ]
+     }
+    }
+   }
+  },
+  "SearchAnnotationSetsResponse": {
+   "id": "SearchAnnotationSetsResponse",
+   "type": "object",
+   "externalTypeName": "cloud.genomics.annotations.SearchAnnotationSetsResponse",
+   "properties": {
+    "annotationSets": {
+     "type": "array",
+     "description": "The matching annotation sets.",
+     "items": {
+      "$ref": "AnnotationSet"
+     }
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The continuation token, which is used to page through large result sets. Provide this value in a subsequent request to return the next page of results. This field will be empty if there aren't any additional results."
+    }
+   }
+  },
+  "SearchAnnotationsRequest": {
+   "id": "SearchAnnotationsRequest",
+   "type": "object",
+   "externalTypeName": "cloud.genomics.annotations.SearchAnnotationsRequest",
+   "properties": {
+    "annotationSetIds": {
+     "type": "array",
+     "description": "The annotation sets to search within. The caller must have READ access to these annotation sets. Required.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "pageSize": {
+     "type": "integer",
+     "description": "Specifies number of results to return in a single page. If unspecified, it will default to 256. The maximum value is 2048.",
+     "format": "int32"
+    },
+    "pageToken": {
+     "type": "string",
+     "description": "The continuation token, which is used to page through large result sets. To get the next page of results, set this parameter to the value of nextPageToken from the previous response."
+    },
+    "range": {
+     "$ref": "QueryRange",
+     "description": "If specified, this query matches only annotations which overlap this range."
+    }
+   }
+  },
+  "SearchAnnotationsResponse": {
+   "id": "SearchAnnotationsResponse",
+   "type": "object",
+   "externalTypeName": "cloud.genomics.annotations.SearchAnnotationsResponse",
+   "properties": {
+    "annotations": {
+     "type": "array",
+     "description": "The matching annotations.",
+     "items": {
+      "$ref": "Annotation"
+     }
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The continuation token, which is used to page through large result sets. Provide this value in a subsequent request to return the next page of results. This field will be empty if there aren't any additional results."
+    }
+   }
+  },
   "SearchCallSetsRequest": {
    "id": "SearchCallSetsRequest",
    "type": "object",
@@ -1767,6 +2073,67 @@
     }
    }
   },
+  "Transcript": {
+   "id": "Transcript",
+   "type": "object",
+   "description": "A transcript represents the assertion that a particular region of the reference genome may be transcribed as RNA.",
+   "externalTypeName": "cloud.genomics.annotations.Transcript",
+   "properties": {
+    "codingSequence": {
+     "$ref": "TranscriptCodingSequence",
+     "description": "The range of the coding sequence for this transcript, if any. To determine the exact ranges of coding sequence, intersect this range with those of the exons, if any. If there are any exons, the codingSequence must start and end within them.\n\nNote that in some cases, the reference genome will not exactly match the observed mRNA transcript e.g. due to variance in the source genome from reference. In these cases, exon.frame will not necessarily match the expected reference reading frame and coding exon reference bases cannot necessarily be concatenated to produce the original transcript mRNA."
+    },
+    "exons": {
+     "type": "array",
+     "description": "The exons which compose this transcript. Exons are the pieces of the transcript which are spliced together, may be exported from a cell's nucleus, and may then be translated to protein. This field should be unset for genomes where transcript splicing does not occur, for example prokaryotes.\n\n\nIntrons are regions of the transcript which are not included in the spliced RNA product. Though not explicitly modeled here, intron ranges can be deduced; all regions of this transcript which are not exons are introns.\n\n\nExonic sequences do not necessarily code for a translational product (amino acids). Only the regions of exons bounded by the codingSequence correspond to coding DNA sequence.\n\n\nExons are ordered by start position and may not overlap.",
+     "items": {
+      "$ref": "TranscriptExon"
+     }
+    },
+    "geneId": {
+     "type": "string",
+     "description": "The annotation ID of the gene from which this transcript is transcribed."
+    }
+   }
+  },
+  "TranscriptCodingSequence": {
+   "id": "TranscriptCodingSequence",
+   "type": "object",
+   "externalTypeName": "cloud.genomics.annotations.Transcript.CodingSequence",
+   "properties": {
+    "end": {
+     "type": "string",
+     "description": "The end of the coding sequence on this annotation's reference sequence, 0-based exclusive. Note that this position is relative to the reference start, and not the containing annotation start.",
+     "format": "int64"
+    },
+    "start": {
+     "type": "string",
+     "description": "The start of the coding sequence on this annotation's reference sequence, 0-based inclusive. Note that this position is relative to the reference start, and not the containing annotation start.",
+     "format": "int64"
+    }
+   }
+  },
+  "TranscriptExon": {
+   "id": "TranscriptExon",
+   "type": "object",
+   "externalTypeName": "cloud.genomics.annotations.Transcript.Exon",
+   "properties": {
+    "end": {
+     "type": "string",
+     "description": "The end position of the exon on this annotation's reference sequence, 0-based exclusive. Note that this is relative to the reference start, and not the containing annotation start.",
+     "format": "int64"
+    },
+    "frame": {
+     "$ref": "Int32Value",
+     "description": "The frame of this exon. Contains a value of 0, 1, or 2 which indicates the offset of the first coding base of the exon within the reading frame of the coding DNA sequence, if any. This field is dependent on the strandedness of this annotation (see Annotation.position.reverseStrand). For forward stranded annotations, this offset is relative to the exon.start. For reverse strand annotations, this offset is relative to the exon.end-1.\n\nUnset if this exon does not intersect the coding sequence. Upon creation of a transcript, the frame must be populated for all or none of the coding exons."
+    },
+    "start": {
+     "type": "string",
+     "description": "The start position of the exon on this annotation's reference sequence, 0-based inclusive. Note that this is relative to the reference start, and not the containing annotation start.",
+     "format": "int64"
+    }
+   }
+  },
   "Variant": {
    "id": "Variant",
    "type": "object",
@@ -1850,6 +2217,152 @@
     }
    }
   },
+  "VariantAnnotation": {
+   "id": "VariantAnnotation",
+   "type": "object",
+   "description": "A Variant annotation.",
+   "externalTypeName": "cloud.genomics.annotations.VariantAnnotation",
+   "properties": {
+    "alternateBases": {
+     "type": "string",
+     "description": "The alternate allele for this variant. If multiple alternate alleles exist at this location, create a separate variant for each one, as they may represent distinct conditions."
+    },
+    "clinicalSignificance": {
+     "type": "string",
+     "description": "Describes the clinical significance of a variant. It is adapted from the ClinVar controlled vocabulary for clinical significance described at: http://www.ncbi.nlm.nih.gov/clinvar/docs/clinsig/",
+     "enum": [
+      "ASSOCIATION",
+      "BENIGN",
+      "CLINICAL_SIGNIFICANCE_UNSPECIFIED",
+      "CONFERS_SENSITIVITY",
+      "DRUG_RESPONSE",
+      "HISTOCOMPATIBILITY",
+      "LIKELY_BENIGN",
+      "LIKELY_PATHOGENIC",
+      "MULTIPLE_REPORTED",
+      "OTHER",
+      "PATHOGENIC",
+      "PROTECTIVE",
+      "RISK_FACTOR",
+      "UNCERTAIN"
+     ],
+     "enumDescriptions": [
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      ""
+     ]
+    },
+    "conditions": {
+     "type": "array",
+     "description": "The set of conditions associated with this variant. A condition describes the way a variant influences human health.",
+     "items": {
+      "$ref": "VariantAnnotationCondition"
+     }
+    },
+    "effect": {
+     "type": "string",
+     "description": "Effect of the variant on the coding sequence.",
+     "enum": [
+      "EFFECT_UNSPECIFIED",
+      "FRAMESHIFT",
+      "FRAME_PRESERVING_INDEL",
+      "NONSYNONYMOUS_SNP",
+      "OTHER",
+      "SPLICE_SITE_DISRUPTION",
+      "STOP_GAIN",
+      "STOP_LOSS",
+      "SYNONYMOUS_SNP"
+     ],
+     "enumDescriptions": [
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      ""
+     ]
+    },
+    "geneId": {
+     "type": "string",
+     "description": "Google annotation ID of the gene affected by this variant. This should be provided when the variant is created."
+    },
+    "transcriptIds": {
+     "type": "array",
+     "description": "Google annotation ID of the transcripts affected by this variant. These should be provided when the variant is created.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "type": {
+     "type": "string",
+     "description": "Type has been adapted from ClinVar's list of variant types.",
+     "enum": [
+      "CNV",
+      "DELETION",
+      "INSERTION",
+      "OTHER",
+      "SNP",
+      "STRUCTURAL",
+      "SUBSTITUTION",
+      "TYPE_UNSPECIFIED"
+     ],
+     "enumDescriptions": [
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      "",
+      ""
+     ]
+    }
+   }
+  },
+  "VariantAnnotationCondition": {
+   "id": "VariantAnnotationCondition",
+   "type": "object",
+   "description": "",
+   "externalTypeName": "cloud.genomics.annotations.VariantAnnotation.Condition",
+   "properties": {
+    "conceptId": {
+     "type": "string",
+     "description": "The MedGen concept id associated with this gene. Search for these IDs at http://www.ncbi.nlm.nih.gov/medgen/"
+    },
+    "externalIds": {
+     "type": "array",
+     "description": "The set of external ids for this condition.",
+     "items": {
+      "$ref": "ExternalId"
+     }
+    },
+    "names": {
+     "type": "array",
+     "description": "A set of names for the condition.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "omimId": {
+     "type": "string",
+     "description": "The OMIM id for this condition. Search for these IDs at http://omim.org/"
+    }
+   }
+  },
   "VariantSet": {
    "id": "VariantSet",
    "type": "object",
@@ -1882,6 +2395,268 @@
   }
  },
  "resources": {
+  "annotationSets": {
+   "methods": {
+    "create": {
+     "id": "genomics.annotationSets.create",
+     "path": "annotationSets",
+     "httpMethod": "POST",
+     "description": "Creates a new annotation set. Caller must have WRITE permission for the associated dataset.",
+     "request": {
+      "$ref": "AnnotationSet"
+     },
+     "response": {
+      "$ref": "AnnotationSet"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    },
+    "delete": {
+     "id": "genomics.annotationSets.delete",
+     "path": "annotationSets/{annotationSetId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes an annotation set. Caller must have WRITE permission for the associated annotation set.",
+     "parameters": {
+      "annotationSetId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be deleted.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationSetId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    },
+    "get": {
+     "id": "genomics.annotationSets.get",
+     "path": "annotationSets/{annotationSetId}",
+     "httpMethod": "GET",
+     "description": "Gets an annotation set. Caller must have READ permission for the associated dataset.",
+     "parameters": {
+      "annotationSetId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be retrieved.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationSetId"
+     ],
+     "response": {
+      "$ref": "AnnotationSet"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics",
+      "https://www.googleapis.com/auth/genomics.readonly"
+     ]
+    },
+    "patch": {
+     "id": "genomics.annotationSets.patch",
+     "path": "annotationSets/{annotationSetId}",
+     "httpMethod": "PATCH",
+     "description": "Updates an annotation set. The update must respect all mutability restrictions and other invariants described on the annotation set resource. Caller must have WRITE permission for the associated dataset. This method supports patch semantics.",
+     "parameters": {
+      "annotationSetId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationSetId"
+     ],
+     "request": {
+      "$ref": "AnnotationSet"
+     },
+     "response": {
+      "$ref": "AnnotationSet"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    },
+    "search": {
+     "id": "genomics.annotationSets.search",
+     "path": "annotationSets/search",
+     "httpMethod": "POST",
+     "description": "Searches for annotation sets which match the given criteria. Results are returned in a deterministic order. Caller must have READ permission for the queried datasets.",
+     "request": {
+      "$ref": "SearchAnnotationSetsRequest"
+     },
+     "response": {
+      "$ref": "SearchAnnotationSetsResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics",
+      "https://www.googleapis.com/auth/genomics.readonly"
+     ]
+    },
+    "update": {
+     "id": "genomics.annotationSets.update",
+     "path": "annotationSets/{annotationSetId}",
+     "httpMethod": "PUT",
+     "description": "Updates an annotation set. The update must respect all mutability restrictions and other invariants described on the annotation set resource. Caller must have WRITE permission for the associated dataset.",
+     "parameters": {
+      "annotationSetId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationSetId"
+     ],
+     "request": {
+      "$ref": "AnnotationSet"
+     },
+     "response": {
+      "$ref": "AnnotationSet"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    }
+   }
+  },
+  "annotations": {
+   "methods": {
+    "create": {
+     "id": "genomics.annotations.create",
+     "path": "annotations",
+     "httpMethod": "POST",
+     "description": "Creates a new annotation. Caller must have WRITE permission for the associated annotation set.",
+     "request": {
+      "$ref": "Annotation"
+     },
+     "response": {
+      "$ref": "Annotation"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    },
+    "delete": {
+     "id": "genomics.annotations.delete",
+     "path": "annotations/{annotationId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes an annotation. Caller must have WRITE permission for the associated annotation set.",
+     "parameters": {
+      "annotationId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be deleted.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    },
+    "get": {
+     "id": "genomics.annotations.get",
+     "path": "annotations/{annotationId}",
+     "httpMethod": "GET",
+     "description": "Gets an annotation. Caller must have READ permission for the associated annotation set.",
+     "parameters": {
+      "annotationId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be retrieved.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationId"
+     ],
+     "response": {
+      "$ref": "Annotation"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics",
+      "https://www.googleapis.com/auth/genomics.readonly"
+     ]
+    },
+    "patch": {
+     "id": "genomics.annotations.patch",
+     "path": "annotations/{annotationId}",
+     "httpMethod": "PATCH",
+     "description": "Updates an annotation. The update must respect all mutability restrictions and other invariants described on the annotation resource. Caller must have WRITE permission for the associated dataset. This method supports patch semantics.",
+     "parameters": {
+      "annotationId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationId"
+     ],
+     "request": {
+      "$ref": "Annotation"
+     },
+     "response": {
+      "$ref": "Annotation"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    },
+    "search": {
+     "id": "genomics.annotations.search",
+     "path": "annotations/search",
+     "httpMethod": "POST",
+     "description": "Searches for annotations which match the given criteria. Results are returned ordered by start position. Annotations which have matching start positions are ordered deterministically. Caller must have READ permission for the queried annotation sets.",
+     "request": {
+      "$ref": "SearchAnnotationsRequest"
+     },
+     "response": {
+      "$ref": "SearchAnnotationsResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics",
+      "https://www.googleapis.com/auth/genomics.readonly"
+     ]
+    },
+    "update": {
+     "id": "genomics.annotations.update",
+     "path": "annotations/{annotationId}",
+     "httpMethod": "PUT",
+     "description": "Updates an annotation. The update must respect all mutability restrictions and other invariants described on the annotation resource. Caller must have WRITE permission for the associated dataset.",
+     "parameters": {
+      "annotationId": {
+       "type": "string",
+       "description": "The ID of the annotation set to be updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "annotationId"
+     ],
+     "request": {
+      "$ref": "Annotation"
+     },
+     "response": {
+      "$ref": "Annotation"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/genomics"
+     ]
+    }
+   }
+  },
   "callsets": {
    "methods": {
     "create": {
diff --git a/genomics/v1beta2/genomics-gen.go b/genomics/v1beta2/genomics-gen.go
index c140b03..b25f66f 100644
--- a/genomics/v1beta2/genomics-gen.go
+++ b/genomics/v1beta2/genomics-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "genomics:v1beta2"
 const apiName = "genomics"
@@ -59,6 +61,8 @@
 		return nil, errors.New("client is nil")
 	}
 	s := &Service{client: client, BasePath: basePath}
+	s.AnnotationSets = NewAnnotationSetsService(s)
+	s.Annotations = NewAnnotationsService(s)
 	s.Callsets = NewCallsetsService(s)
 	s.Datasets = NewDatasetsService(s)
 	s.Experimental = NewExperimentalService(s)
@@ -76,6 +80,10 @@
 	client   *http.Client
 	BasePath string // API endpoint base URL
 
+	AnnotationSets *AnnotationSetsService
+
+	Annotations *AnnotationsService
+
 	Callsets *CallsetsService
 
 	Datasets *DatasetsService
@@ -97,6 +105,24 @@
 	Variantsets *VariantsetsService
 }
 
+func NewAnnotationSetsService(s *Service) *AnnotationSetsService {
+	rs := &AnnotationSetsService{s: s}
+	return rs
+}
+
+type AnnotationSetsService struct {
+	s *Service
+}
+
+func NewAnnotationsService(s *Service) *AnnotationsService {
+	rs := &AnnotationsService{s: s}
+	return rs
+}
+
+type AnnotationsService struct {
+	s *Service
+}
+
 func NewCallsetsService(s *Service) *CallsetsService {
 	rs := &CallsetsService{s: s}
 	return rs
@@ -225,7 +251,7 @@
 
 type AlignReadGroupSetsRequest struct {
 	// BamSourceUris: The BAM source files for alignment. Exactly one of
-	// readGroupSetIds, bamSourceUris, interleavedFastqSource or
+	// readGroupSetId, bamSourceUris, interleavedFastqSource or
 	// pairedFastqSource must be provided. The caller must have READ
 	// permissions for these files.
 	BamSourceUris []string `json:"bamSourceUris,omitempty"`
@@ -238,23 +264,23 @@
 	// InterleavedFastqSource: The interleaved FASTQ source files for
 	// alignment, where both members of each pair of reads are found on
 	// consecutive records within the same FASTQ file. Exactly one of
-	// readGroupSetIds, bamSourceUris, interleavedFastqSource or
+	// readGroupSetId, bamSourceUris, interleavedFastqSource or
 	// pairedFastqSource must be provided.
 	InterleavedFastqSource *InterleavedFastqSource `json:"interleavedFastqSource,omitempty"`
 
 	// PairedFastqSource: The paired end FASTQ source files for alignment,
 	// where each member of a pair of reads are found in separate files.
-	// Exactly one of readGroupSetIds, bamSourceUris, interleavedFastqSource
+	// Exactly one of readGroupSetId, bamSourceUris, interleavedFastqSource
 	// or pairedFastqSource must be provided.
 	PairedFastqSource *PairedFastqSource `json:"pairedFastqSource,omitempty"`
 
-	// ReadGroupSetIds: The IDs of the read group sets which will be
-	// aligned. New read group sets will be generated to hold the aligned
-	// data, the originals will not be modified. The caller must have READ
-	// permissions for these read group sets. Exactly one of
-	// readGroupSetIds, bamSourceUris, interleavedFastqSource or
-	// pairedFastqSource must be provided.
-	ReadGroupSetIds []string `json:"readGroupSetIds,omitempty"`
+	// ReadGroupSetId: The ID of the read group set which will be aligned. A
+	// new read group set will be generated to hold the aligned data; the
+	// original will not be modified. The caller must have READ permissions
+	// for this read group set. Exactly one of readGroupSetId,
+	// bamSourceUris, interleavedFastqSource or pairedFastqSource must be
+	// provided.
+	ReadGroupSetId string `json:"readGroupSetId,omitempty"`
 }
 
 type AlignReadGroupSetsResponse struct {
@@ -262,6 +288,65 @@
 	JobId string `json:"jobId,omitempty"`
 }
 
+type Annotation struct {
+	// AnnotationSetId: The ID of the containing annotation set.
+	AnnotationSetId string `json:"annotationSetId,omitempty"`
+
+	// Id: The generated unique ID for this annotation.
+	Id string `json:"id,omitempty"`
+
+	// Info: A map of additional data for this annotation.
+	Info map[string][]string `json:"info,omitempty"`
+
+	// Name: The display name of this annotation.
+	Name string `json:"name,omitempty"`
+
+	// Position: The position of this annotation on the reference sequence.
+	Position *RangePosition `json:"position,omitempty"`
+
+	// Transcript: A transcript value represents the assertion that a
+	// particular region of the reference genome may be transcribed as RNA.
+	// An alternate splicing pattern would be represented as a separate
+	// transcript object. This field is only set for annotations of type
+	// TRANSCRIPT.
+	Transcript *Transcript `json:"transcript,omitempty"`
+
+	// Type: The data type for this annotation. Must match the containing
+	// annotation set's type.
+	Type string `json:"type,omitempty"`
+
+	// Variant: A variant annotation which describes the effect of a variant
+	// on the genome, the coding sequence, and/or higher level consequences
+	// at the organism level, e.g. pathogenicity. This field is only set for
+	// annotations of type VARIANT.
+	Variant *VariantAnnotation `json:"variant,omitempty"`
+}
+
+type AnnotationSet struct {
+	// DatasetId: The ID of the containing dataset.
+	DatasetId string `json:"datasetId,omitempty"`
+
+	// Id: The generated unique ID for this annotation set.
+	Id string `json:"id,omitempty"`
+
+	// Info: A map of additional data for this annotation set.
+	Info map[string][]string `json:"info,omitempty"`
+
+	// Name: The display name for this annotation set.
+	Name string `json:"name,omitempty"`
+
+	// ReferenceSetId: The ID of the reference set which defines the
+	// coordinate-space for this set's annotations.
+	ReferenceSetId string `json:"referenceSetId,omitempty"`
+
+	// SourceUri: The source URI describing the file from which this
+	// annotation set was generated, if any.
+	SourceUri string `json:"sourceUri,omitempty"`
+
+	// Type: The type of annotations contained within this set.
+	Type string `json:"type,omitempty"`
+}
+
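As a brief illustration of the new generated types above (a sketch only; the IDs and names below are placeholders, not values from this change):

// Sketch: populating the new annotation types. All IDs are hypothetical.
func exampleAnnotationValues() (*AnnotationSet, *Annotation) {
	set := &AnnotationSet{
		DatasetId:      "example-dataset-id",       // hypothetical dataset ID
		Name:           "example variant annotations",
		ReferenceSetId: "example-reference-set-id", // hypothetical reference set ID
		Type:           "VARIANT",
	}
	ann := &Annotation{
		AnnotationSetId: "example-annotation-set-id", // normally the ID returned by AnnotationSets.Create
		Name:            "example variant annotation",
		Type:            "VARIANT", // must match the containing set's type
		Position: &RangePosition{
			ReferenceName: "chr1",
			Start:         1000, // 0-based, inclusive
			End:           1001, // 0-based, exclusive
		},
	}
	return set, ann
}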
 type Call struct {
 	// CallSetId: The ID of the call set this variant call belongs to.
 	CallSetId string `json:"callSetId,omitempty"`
@@ -306,14 +391,14 @@
 	// belong to. The caller must have WRITE permissions to this dataset.
 	DatasetId string `json:"datasetId,omitempty"`
 
-	// ReadGroupSetIds: The IDs of the read group sets which will be called.
-	// The caller must have READ permissions for these read group sets. One
-	// of readGroupSetIds or sourceUris must be provided.
-	ReadGroupSetIds []string `json:"readGroupSetIds,omitempty"`
+	// ReadGroupSetId: The ID of the read group set which will be called.
+	// The caller must have READ permissions for this read group set. One of
+	// readGroupSetId or sourceUris must be provided.
+	ReadGroupSetId string `json:"readGroupSetId,omitempty"`
 
 	// SourceUris: A list of URIs pointing at BAM files in Google Cloud
 	// Storage which will be called. FASTQ files are not allowed. The caller
-	// must have READ permissions for these files. One of readGroupSetIds or
+	// must have READ permissions for these files. One of readGroupSetId or
 	// sourceUris must be provided.
 	SourceUris []string `json:"sourceUris,omitempty"`
 }
@@ -474,6 +559,14 @@
 	JobId string `json:"jobId,omitempty"`
 }
 
+type ExternalId struct {
+	// Id: The id used by the source of this data.
+	Id string `json:"id,omitempty"`
+
+	// SourceName: The name of the source of this data.
+	SourceName string `json:"sourceName,omitempty"`
+}
+
 type FastqMetadata struct {
 	// LibraryName: Optionally specifies the library name for alignment from
 	// FASTQ.
@@ -539,6 +632,11 @@
 	JobId string `json:"jobId,omitempty"`
 }
 
+type Int32Value struct {
+	// Value: The int32 value.
+	Value int64 `json:"value,omitempty"`
+}
+
 type InterleavedFastqSource struct {
 	// Metadata: Optionally specifies the metadata to be associated with the
 	// final aligned read group set.
@@ -726,6 +824,27 @@
 	ReverseStrand bool `json:"reverseStrand,omitempty"`
 }
 
+type QueryRange struct {
+	// End: The end position of the range on the reference, 0-based
+	// exclusive. If specified, referenceId or referenceName must also be
+	// specified. If unset or 0, defaults to the length of the reference.
+	End int64 `json:"end,omitempty,string"`
+
+	// ReferenceId: The ID of the reference to query. At most one of
+	// referenceId and referenceName should be specified.
+	ReferenceId string `json:"referenceId,omitempty"`
+
+	// ReferenceName: The name of the reference to query, within the
+	// reference set associated with this query. At most one of referenceId
+	// and referenceName should be specified.
+	ReferenceName string `json:"referenceName,omitempty"`
+
+	// Start: The start position of the range on the reference, 0-based
+	// inclusive. If specified, referenceId or referenceName must also be
+	// specified. Defaults to 0.
+	Start int64 `json:"start,omitempty,string"`
+}
+
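A small sketch of how QueryRange's 0-based, half-open coordinates might be used when building an annotation search request (identifiers are placeholders):

// Sketch: match annotations overlapping bases 10000..19999 of chr2
// (start inclusive, end exclusive, both 0-based) in one annotation set.
func exampleAnnotationRangeQuery() *SearchAnnotationsRequest {
	return &SearchAnnotationsRequest{
		AnnotationSetIds: []string{"example-annotation-set-id"}, // hypothetical ID
		Range: &QueryRange{
			ReferenceName: "chr2",
			Start:         10000, // 0-based inclusive
			End:           20000, // 0-based exclusive
		},
		PageSize: 256,
	}
}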
 type Range struct {
 	// End: The end position of the range on the reference, 0-based
 	// exclusive. If specified, referenceName must also be specified.
@@ -740,6 +859,30 @@
 	Start int64 `json:"start,omitempty,string"`
 }
 
+type RangePosition struct {
+	// End: The end position of the range on the reference, 0-based
+	// exclusive.
+	End int64 `json:"end,omitempty,string"`
+
+	// ReferenceId: The ID of the Google Genomics reference associated with
+	// this range.
+	ReferenceId string `json:"referenceId,omitempty"`
+
+	// ReferenceName: The display name corresponding to the reference
+	// specified by referenceId, for example chr1, 1, or chrX.
+	ReferenceName string `json:"referenceName,omitempty"`
+
+	// ReverseStrand: Whether this range refers to the reverse strand, as
+	// opposed to the forward strand. Note that regardless of this field,
+	// the start/end position of the range always refer to the forward
+	// strand.
+	ReverseStrand bool `json:"reverseStrand,omitempty"`
+
+	// Start: The start position of the range on the reference, 0-based
+	// inclusive.
+	Start int64 `json:"start,omitempty,string"`
+}
+
 type Read struct {
 	// AlignedQuality: The quality of the read sequence contained in this
 	// alignment record. alignedSequence and alignedQuality may be shorter
@@ -1018,6 +1161,74 @@
 	SourceURI string `json:"sourceURI,omitempty"`
 }
 
+type SearchAnnotationSetsRequest struct {
+	// DatasetIds: The dataset IDs to search within. Caller must have READ
+	// access to these datasets.
+	DatasetIds []string `json:"datasetIds,omitempty"`
+
+	// Name: Only return annotation sets for which a substring of the name
+	// matches this string (case insensitive).
+	Name string `json:"name,omitempty"`
+
+	// PageSize: Specifies number of results to return in a single page. If
+	// unspecified, it will default to 128. The maximum value is 1024.
+	PageSize int64 `json:"pageSize,omitempty"`
+
+	// PageToken: The continuation token, which is used to page through
+	// large result sets. To get the next page of results, set this
+	// parameter to the value of nextPageToken from the previous response.
+	PageToken string `json:"pageToken,omitempty"`
+
+	// ReferenceSetId: If specified, only annotation sets associated with
+	// the given reference set are returned.
+	ReferenceSetId string `json:"referenceSetId,omitempty"`
+
+	// Types: If specified, only annotation sets which have any of these
+	// types are returned.
+	Types []string `json:"types,omitempty"`
+}
+
+type SearchAnnotationSetsResponse struct {
+	// AnnotationSets: The matching annotation sets.
+	AnnotationSets []*AnnotationSet `json:"annotationSets,omitempty"`
+
+	// NextPageToken: The continuation token, which is used to page through
+	// large result sets. Provide this value in a subsequent request to
+	// return the next page of results. This field will be empty if there
+	// aren't any additional results.
+	NextPageToken string `json:"nextPageToken,omitempty"`
+}
+
+type SearchAnnotationsRequest struct {
+	// AnnotationSetIds: The annotation sets to search within. The caller
+	// must have READ access to these annotation sets. Required.
+	AnnotationSetIds []string `json:"annotationSetIds,omitempty"`
+
+	// PageSize: Specifies number of results to return in a single page. If
+	// unspecified, it will default to 256. The maximum value is 2048.
+	PageSize int64 `json:"pageSize,omitempty"`
+
+	// PageToken: The continuation token, which is used to page through
+	// large result sets. To get the next page of results, set this
+	// parameter to the value of nextPageToken from the previous response.
+	PageToken string `json:"pageToken,omitempty"`
+
+	// Range: If specified, this query matches only annotations which
+	// overlap this range.
+	Range *QueryRange `json:"range,omitempty"`
+}
+
+type SearchAnnotationsResponse struct {
+	// Annotations: The matching annotations.
+	Annotations []*Annotation `json:"annotations,omitempty"`
+
+	// NextPageToken: The continuation token, which is used to page through
+	// large result sets. Provide this value in a subsequent request to
+	// return the next page of results. This field will be empty if there
+	// aren't any additional results.
+	NextPageToken string `json:"nextPageToken,omitempty"`
+}
+
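The pageToken/nextPageToken fields above pair up in the usual way; a minimal pagination sketch against the generated AnnotationSets.Search call, assuming a configured *Service:

// Sketch: collect every annotation set in a dataset by threading
// nextPageToken from each response back into the next request.
func exampleListAllAnnotationSets(svc *Service, datasetID string) ([]*AnnotationSet, error) {
	var all []*AnnotationSet
	req := &SearchAnnotationSetsRequest{
		DatasetIds: []string{datasetID},
		PageSize:   128, // the documented default
	}
	for {
		resp, err := svc.AnnotationSets.Search(req).Do()
		if err != nil {
			return nil, err
		}
		all = append(all, resp.AnnotationSets...)
		if resp.NextPageToken == "" {
			return all, nil
		}
		req.PageToken = resp.NextPageToken
	}
}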
 type SearchCallSetsRequest struct {
 	// Name: Only return call sets for which a substring of the name matches
 	// this string.
@@ -1310,6 +1521,87 @@
 	Variants []*Variant `json:"variants,omitempty"`
 }
 
+type Transcript struct {
+	// CodingSequence: The range of the coding sequence for this transcript,
+	// if any. To determine the exact ranges of coding sequence, intersect
+	// this range with those of the exons, if any. If there are any exons,
+	// the codingSequence must start and end within them.
+	//
+	// Note that in some
+	// cases, the reference genome will not exactly match the observed mRNA
+	// transcript e.g. due to variance in the source genome from reference.
+	// In these cases, exon.frame will not necessarily match the expected
+	// reference reading frame and coding exon reference bases cannot
+	// necessarily be concatenated to produce the original transcript mRNA.
+	CodingSequence *TranscriptCodingSequence `json:"codingSequence,omitempty"`
+
+	// Exons: The exons which compose this transcript. Exons are the pieces
+	// of the transcript which are spliced together, may be exported from a
+	// cell's nucleus, and may then be translated to protein. This field
+	// should be unset for genomes where transcript splicing does not occur,
+	// for example prokaryotes.
+	//
+	//
+	// Introns are regions of the transcript
+	// which are not included in the spliced RNA product. Though not
+	// explicitly modeled here, intron ranges can be deduced; all regions of
+	// this transcript which are not exons are introns.
+	//
+	//
+	// Exonic sequences
+	// do not necessarily code for a translational product (amino acids).
+	// Only the regions of exons bounded by the codingSequence correspond to
+	// coding DNA sequence.
+	//
+	//
+	// Exons are ordered by start position and may
+	// not overlap.
+	Exons []*TranscriptExon `json:"exons,omitempty"`
+
+	// GeneId: The annotation ID of the gene from which this transcript is
+	// transcribed.
+	GeneId string `json:"geneId,omitempty"`
+}
+
+type TranscriptCodingSequence struct {
+	// End: The end of the coding sequence on this annotation's reference
+	// sequence, 0-based exclusive. Note that this position is relative to
+	// the reference start, and not the containing annotation start.
+	End int64 `json:"end,omitempty,string"`
+
+	// Start: The start of the coding sequence on this annotation's
+	// reference sequence, 0-based inclusive. Note that this position is
+	// relative to the reference start, and not the containing annotation
+	// start.
+	Start int64 `json:"start,omitempty,string"`
+}
+
+type TranscriptExon struct {
+	// End: The end position of the exon on this annotation's reference
+	// sequence, 0-based exclusive. Note that this is relative to the
+	// reference start, and not the containing annotation start.
+	End int64 `json:"end,omitempty,string"`
+
+	// Frame: The frame of this exon. Contains a value of 0, 1, or 2 which
+	// indicates the offset of the first coding base of the exon within the
+	// reading frame of the coding DNA sequence, if any. This field is
+	// dependent on the strandedness of this annotation (see
+	// Annotation.position.reverseStrand). For forward stranded annotations,
+	// this offset is relative to the exon.start. For reverse strand
+	// annotations, this offset is relative to the exon.end-1.
+	//
+	// Unset if
+	// this exon does not intersect the coding sequence. Upon creation of a
+	// transcript, the frame must be populated for all or none of the coding
+	// exons.
+	Frame *Int32Value `json:"frame,omitempty"`
+
+	// Start: The start position of the exon on this annotation's reference
+	// sequence, 0-based inclusive. Note that this is relative to the
+	// reference start, and not the containing annotation start.
+	Start int64 `json:"start,omitempty,string"`
+}
+
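The Int32Value wrapper is presumably used for TranscriptExon.Frame so that a frame of 0 can still be sent explicitly (a plain int field with omitempty would drop it); a hypothetical sketch:

// Sketch: a forward-strand transcript with one coding exon. The coding
// sequence starts and ends within the exon, and frame 0 is set explicitly.
func exampleTranscript() *Transcript {
	return &Transcript{
		GeneId: "example-gene-annotation-id", // hypothetical annotation ID
		CodingSequence: &TranscriptCodingSequence{
			Start: 1200, // 0-based inclusive, relative to the reference start
			End:   1500, // 0-based exclusive
		},
		Exons: []*TranscriptExon{
			{
				Start: 1100,
				End:   1600,
				Frame: &Int32Value{Value: 0}, // offset 0 within the reading frame
			},
		},
	}
}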
 type Variant struct {
 	// AlternateBases: The bases that appear instead of the reference bases.
 	AlternateBases []string `json:"alternateBases,omitempty"`
@@ -1363,6 +1655,53 @@
 	VariantSetId string `json:"variantSetId,omitempty"`
 }
 
+type VariantAnnotation struct {
+	// AlternateBases: The alternate allele for this variant. If multiple
+	// alternate alleles exist at this location, create a separate variant
+	// for each one, as they may represent distinct conditions.
+	AlternateBases string `json:"alternateBases,omitempty"`
+
+	// ClinicalSignificance: Describes the clinical significance of a
+	// variant. It is adapted from the ClinVar controlled vocabulary for
+	// clinical significance described at:
+	// http://www.ncbi.nlm.nih.gov/clinvar/docs/clinsig/
+	ClinicalSignificance string `json:"clinicalSignificance,omitempty"`
+
+	// Conditions: The set of conditions associated with this variant. A
+	// condition describes the way a variant influences human health.
+	Conditions []*VariantAnnotationCondition `json:"conditions,omitempty"`
+
+	// Effect: Effect of the variant on the coding sequence.
+	Effect string `json:"effect,omitempty"`
+
+	// GeneId: Google annotation ID of the gene affected by this variant.
+	// This should be provided when the variant is created.
+	GeneId string `json:"geneId,omitempty"`
+
+	// TranscriptIds: Google annotation IDs of the transcripts affected by
+	// this variant. These should be provided when the variant is created.
+	TranscriptIds []string `json:"transcriptIds,omitempty"`
+
+	// Type: Type has been adapted from ClinVar's list of variant types.
+	Type string `json:"type,omitempty"`
+}
+
+type VariantAnnotationCondition struct {
+	// ConceptId: The MedGen concept id associated with this gene. Search
+	// for these IDs at http://www.ncbi.nlm.nih.gov/medgen/
+	ConceptId string `json:"conceptId,omitempty"`
+
+	// ExternalIds: The set of external ids for this condition.
+	ExternalIds []*ExternalId `json:"externalIds,omitempty"`
+
+	// Names: A set of names for the condition.
+	Names []string `json:"names,omitempty"`
+
+	// OmimId: The OMIM id for this condition. Search for these IDs at
+	// http://omim.org/
+	OmimId string `json:"omimId,omitempty"`
+}
+
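A hedged sketch of a VariantAnnotation tying a SNP to a single condition (enum strings come from the schema above; all identifiers are placeholders):

// Sketch: a pathogenic, nonsynonymous SNP associated with one condition.
func exampleVariantAnnotation() *VariantAnnotation {
	return &VariantAnnotation{
		AlternateBases:       "T",
		Type:                 "SNP",
		Effect:               "NONSYNONYMOUS_SNP",
		ClinicalSignificance: "PATHOGENIC",
		GeneId:               "example-gene-annotation-id", // hypothetical
		Conditions: []*VariantAnnotationCondition{
			{
				Names:     []string{"example condition"},
				OmimId:    "000000",   // placeholder OMIM ID
				ConceptId: "C0000000", // placeholder MedGen concept ID
			},
		},
	}
}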
 type VariantSet struct {
 	// DatasetId: The dataset to which this variant set belongs. Immutable.
 	DatasetId string `json:"datasetId,omitempty"`
@@ -1378,6 +1717,959 @@
 	ReferenceBounds []*ReferenceBound `json:"referenceBounds,omitempty"`
 }
 
+// method id "genomics.annotationSets.create":
+
+type AnnotationSetsCreateCall struct {
+	s             *Service
+	annotationset *AnnotationSet
+	opt_          map[string]interface{}
+}
+
+// Create: Creates a new annotation set. Caller must have WRITE
+// permission for the associated dataset.
+func (r *AnnotationSetsService) Create(annotationset *AnnotationSet) *AnnotationSetsCreateCall {
+	c := &AnnotationSetsCreateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationset = annotationset
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationSetsCreateCall) Fields(s ...googleapi.Field) *AnnotationSetsCreateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationSetsCreateCall) Do() (*AnnotationSet, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.annotationset)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotationSets")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AnnotationSet
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Creates a new annotation set. Caller must have WRITE permission for the associated dataset.",
+	//   "httpMethod": "POST",
+	//   "id": "genomics.annotationSets.create",
+	//   "path": "annotationSets",
+	//   "request": {
+	//     "$ref": "AnnotationSet"
+	//   },
+	//   "response": {
+	//     "$ref": "AnnotationSet"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
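A usage sketch for the Create call above, assuming the caller already has a configured *Service (the annotation set values are placeholders):

// Sketch: create an empty GENERIC annotation set in an existing dataset
// and return the server-assigned ID from the response.
func exampleCreateAnnotationSet(svc *Service, datasetID string) (string, error) {
	created, err := svc.AnnotationSets.Create(&AnnotationSet{
		DatasetId: datasetID,
		Name:      "example annotation set", // hypothetical display name
		Type:      "GENERIC",
	}).Do()
	if err != nil {
		return "", err
	}
	return created.Id, nil
}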
+// method id "genomics.annotationSets.delete":
+
+type AnnotationSetsDeleteCall struct {
+	s               *Service
+	annotationSetId string
+	opt_            map[string]interface{}
+}
+
+// Delete: Deletes an annotation set. Caller must have WRITE permission
+// for the associated annotation set.
+func (r *AnnotationSetsService) Delete(annotationSetId string) *AnnotationSetsDeleteCall {
+	c := &AnnotationSetsDeleteCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationSetId = annotationSetId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationSetsDeleteCall) Fields(s ...googleapi.Field) *AnnotationSetsDeleteCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationSetsDeleteCall) Do() error {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotationSets/{annotationSetId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("DELETE", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationSetId": c.annotationSetId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return err
+	}
+	return nil
+	// {
+	//   "description": "Deletes an annotation set. Caller must have WRITE permission for the associated annotation set.",
+	//   "httpMethod": "DELETE",
+	//   "id": "genomics.annotationSets.delete",
+	//   "parameterOrder": [
+	//     "annotationSetId"
+	//   ],
+	//   "parameters": {
+	//     "annotationSetId": {
+	//       "description": "The ID of the annotation set to be deleted.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotationSets/{annotationSetId}",
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotationSets.get":
+
+type AnnotationSetsGetCall struct {
+	s               *Service
+	annotationSetId string
+	opt_            map[string]interface{}
+}
+
+// Get: Gets an annotation set. Caller must have READ permission for the
+// associated dataset.
+func (r *AnnotationSetsService) Get(annotationSetId string) *AnnotationSetsGetCall {
+	c := &AnnotationSetsGetCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationSetId = annotationSetId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationSetsGetCall) Fields(s ...googleapi.Field) *AnnotationSetsGetCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationSetsGetCall) Do() (*AnnotationSet, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotationSets/{annotationSetId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationSetId": c.annotationSetId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AnnotationSet
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Gets an annotation set. Caller must have READ permission for the associated dataset.",
+	//   "httpMethod": "GET",
+	//   "id": "genomics.annotationSets.get",
+	//   "parameterOrder": [
+	//     "annotationSetId"
+	//   ],
+	//   "parameters": {
+	//     "annotationSetId": {
+	//       "description": "The ID of the annotation set to be retrieved.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotationSets/{annotationSetId}",
+	//   "response": {
+	//     "$ref": "AnnotationSet"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics",
+	//     "https://www.googleapis.com/auth/genomics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotationSets.patch":
+
+type AnnotationSetsPatchCall struct {
+	s               *Service
+	annotationSetId string
+	annotationset   *AnnotationSet
+	opt_            map[string]interface{}
+}
+
+// Patch: Updates an annotation set. The update must respect all
+// mutability restrictions and other invariants described on the
+// annotation set resource. Caller must have WRITE permission for the
+// associated dataset. This method supports patch semantics.
+func (r *AnnotationSetsService) Patch(annotationSetId string, annotationset *AnnotationSet) *AnnotationSetsPatchCall {
+	c := &AnnotationSetsPatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationSetId = annotationSetId
+	c.annotationset = annotationset
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationSetsPatchCall) Fields(s ...googleapi.Field) *AnnotationSetsPatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationSetsPatchCall) Do() (*AnnotationSet, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.annotationset)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotationSets/{annotationSetId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PATCH", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationSetId": c.annotationSetId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AnnotationSet
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an annotation set. The update must respect all mutability restrictions and other invariants described on the annotation set resource. Caller must have WRITE permission for the associated dataset. This method supports patch semantics.",
+	//   "httpMethod": "PATCH",
+	//   "id": "genomics.annotationSets.patch",
+	//   "parameterOrder": [
+	//     "annotationSetId"
+	//   ],
+	//   "parameters": {
+	//     "annotationSetId": {
+	//       "description": "The ID of the annotation set to be updated.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotationSets/{annotationSetId}",
+	//   "request": {
+	//     "$ref": "AnnotationSet"
+	//   },
+	//   "response": {
+	//     "$ref": "AnnotationSet"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotationSets.search":
+
+type AnnotationSetsSearchCall struct {
+	s                           *Service
+	searchannotationsetsrequest *SearchAnnotationSetsRequest
+	opt_                        map[string]interface{}
+}
+
+// Search: Searches for annotation sets which match the given criteria.
+// Results are returned in a deterministic order. Caller must have READ
+// permission for the queried datasets.
+func (r *AnnotationSetsService) Search(searchannotationsetsrequest *SearchAnnotationSetsRequest) *AnnotationSetsSearchCall {
+	c := &AnnotationSetsSearchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.searchannotationsetsrequest = searchannotationsetsrequest
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationSetsSearchCall) Fields(s ...googleapi.Field) *AnnotationSetsSearchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationSetsSearchCall) Do() (*SearchAnnotationSetsResponse, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.searchannotationsetsrequest)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotationSets/search")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *SearchAnnotationSetsResponse
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Searches for annotation sets which match the given criteria. Results are returned in a deterministic order. Caller must have READ permission for the queried datasets.",
+	//   "httpMethod": "POST",
+	//   "id": "genomics.annotationSets.search",
+	//   "path": "annotationSets/search",
+	//   "request": {
+	//     "$ref": "SearchAnnotationSetsRequest"
+	//   },
+	//   "response": {
+	//     "$ref": "SearchAnnotationSetsResponse"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics",
+	//     "https://www.googleapis.com/auth/genomics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotationSets.update":
+
+type AnnotationSetsUpdateCall struct {
+	s               *Service
+	annotationSetId string
+	annotationset   *AnnotationSet
+	opt_            map[string]interface{}
+}
+
+// Update: Updates an annotation set. The update must respect all
+// mutability restrictions and other invariants described on the
+// annotation set resource. Caller must have WRITE permission for the
+// associated dataset.
+func (r *AnnotationSetsService) Update(annotationSetId string, annotationset *AnnotationSet) *AnnotationSetsUpdateCall {
+	c := &AnnotationSetsUpdateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationSetId = annotationSetId
+	c.annotationset = annotationset
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationSetsUpdateCall) Fields(s ...googleapi.Field) *AnnotationSetsUpdateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationSetsUpdateCall) Do() (*AnnotationSet, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.annotationset)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotationSets/{annotationSetId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PUT", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationSetId": c.annotationSetId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *AnnotationSet
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an annotation set. The update must respect all mutability restrictions and other invariants described on the annotation set resource. Caller must have WRITE permission for the associated dataset.",
+	//   "httpMethod": "PUT",
+	//   "id": "genomics.annotationSets.update",
+	//   "parameterOrder": [
+	//     "annotationSetId"
+	//   ],
+	//   "parameters": {
+	//     "annotationSetId": {
+	//       "description": "The ID of the annotation set to be updated.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotationSets/{annotationSetId}",
+	//   "request": {
+	//     "$ref": "AnnotationSet"
+	//   },
+	//   "response": {
+	//     "$ref": "AnnotationSet"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotations.create":
+
+type AnnotationsCreateCall struct {
+	s          *Service
+	annotation *Annotation
+	opt_       map[string]interface{}
+}
+
+// Create: Creates a new annotation. Caller must have WRITE permission
+// for the associated annotation set.
+func (r *AnnotationsService) Create(annotation *Annotation) *AnnotationsCreateCall {
+	c := &AnnotationsCreateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotation = annotation
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationsCreateCall) Fields(s ...googleapi.Field) *AnnotationsCreateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationsCreateCall) Do() (*Annotation, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.annotation)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotations")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *Annotation
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Creates a new annotation. Caller must have WRITE permission for the associated annotation set.",
+	//   "httpMethod": "POST",
+	//   "id": "genomics.annotations.create",
+	//   "path": "annotations",
+	//   "request": {
+	//     "$ref": "Annotation"
+	//   },
+	//   "response": {
+	//     "$ref": "Annotation"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotations.delete":
+
+type AnnotationsDeleteCall struct {
+	s            *Service
+	annotationId string
+	opt_         map[string]interface{}
+}
+
+// Delete: Deletes an annotation. Caller must have WRITE permission for
+// the associated annotation set.
+func (r *AnnotationsService) Delete(annotationId string) *AnnotationsDeleteCall {
+	c := &AnnotationsDeleteCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationId = annotationId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationsDeleteCall) Fields(s ...googleapi.Field) *AnnotationsDeleteCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationsDeleteCall) Do() error {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotations/{annotationId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("DELETE", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationId": c.annotationId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return err
+	}
+	return nil
+	// {
+	//   "description": "Deletes an annotation. Caller must have WRITE permission for the associated annotation set.",
+	//   "httpMethod": "DELETE",
+	//   "id": "genomics.annotations.delete",
+	//   "parameterOrder": [
+	//     "annotationId"
+	//   ],
+	//   "parameters": {
+	//     "annotationId": {
+	//       "description": "The ID of the annotation to be deleted.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotations/{annotationId}",
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotations.get":
+
+type AnnotationsGetCall struct {
+	s            *Service
+	annotationId string
+	opt_         map[string]interface{}
+}
+
+// Get: Gets an annotation. Caller must have READ permission for the
+// associated annotation set.
+func (r *AnnotationsService) Get(annotationId string) *AnnotationsGetCall {
+	c := &AnnotationsGetCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationId = annotationId
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationsGetCall) Fields(s ...googleapi.Field) *AnnotationsGetCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationsGetCall) Do() (*Annotation, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotations/{annotationId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationId": c.annotationId,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *Annotation
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Gets an annotation. Caller must have READ permission for the associated annotation set.",
+	//   "httpMethod": "GET",
+	//   "id": "genomics.annotations.get",
+	//   "parameterOrder": [
+	//     "annotationId"
+	//   ],
+	//   "parameters": {
+	//     "annotationId": {
+	//       "description": "The ID of the annotation to be retrieved.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotations/{annotationId}",
+	//   "response": {
+	//     "$ref": "Annotation"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics",
+	//     "https://www.googleapis.com/auth/genomics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotations.patch":
+
+type AnnotationsPatchCall struct {
+	s            *Service
+	annotationId string
+	annotation   *Annotation
+	opt_         map[string]interface{}
+}
+
+// Patch: Updates an annotation. The update must respect all mutability
+// restrictions and other invariants described on the annotation
+// resource. Caller must have WRITE permission for the associated
+// dataset. This method supports patch semantics.
+func (r *AnnotationsService) Patch(annotationId string, annotation *Annotation) *AnnotationsPatchCall {
+	c := &AnnotationsPatchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationId = annotationId
+	c.annotation = annotation
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationsPatchCall) Fields(s ...googleapi.Field) *AnnotationsPatchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationsPatchCall) Do() (*Annotation, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.annotation)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotations/{annotationId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PATCH", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationId": c.annotationId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *Annotation
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an annotation. The update must respect all mutability restrictions and other invariants described on the annotation resource. Caller must have WRITE permission for the associated dataset. This method supports patch semantics.",
+	//   "httpMethod": "PATCH",
+	//   "id": "genomics.annotations.patch",
+	//   "parameterOrder": [
+	//     "annotationId"
+	//   ],
+	//   "parameters": {
+	//     "annotationId": {
+	//       "description": "The ID of the annotation to be updated.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotations/{annotationId}",
+	//   "request": {
+	//     "$ref": "Annotation"
+	//   },
+	//   "response": {
+	//     "$ref": "Annotation"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotations.search":
+
+type AnnotationsSearchCall struct {
+	s                        *Service
+	searchannotationsrequest *SearchAnnotationsRequest
+	opt_                     map[string]interface{}
+}
+
+// Search: Searches for annotations which match the given criteria.
+// Results are returned ordered by start position. Annotations which
+// have matching start positions are ordered deterministically. Caller
+// must have READ permission for the queried annotation sets.
+func (r *AnnotationsService) Search(searchannotationsrequest *SearchAnnotationsRequest) *AnnotationsSearchCall {
+	c := &AnnotationsSearchCall{s: r.s, opt_: make(map[string]interface{})}
+	c.searchannotationsrequest = searchannotationsrequest
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationsSearchCall) Fields(s ...googleapi.Field) *AnnotationsSearchCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationsSearchCall) Do() (*SearchAnnotationsResponse, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.searchannotationsrequest)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotations/search")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("POST", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *SearchAnnotationsResponse
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Searches for annotations which match the given criteria. Results are returned ordered by start position. Annotations which have matching start positions are ordered deterministically. Caller must have READ permission for the queried annotation sets.",
+	//   "httpMethod": "POST",
+	//   "id": "genomics.annotations.search",
+	//   "path": "annotations/search",
+	//   "request": {
+	//     "$ref": "SearchAnnotationsRequest"
+	//   },
+	//   "response": {
+	//     "$ref": "SearchAnnotationsResponse"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics",
+	//     "https://www.googleapis.com/auth/genomics.readonly"
+	//   ]
+	// }
+
+}
+
+// method id "genomics.annotations.update":
+
+type AnnotationsUpdateCall struct {
+	s            *Service
+	annotationId string
+	annotation   *Annotation
+	opt_         map[string]interface{}
+}
+
+// Update: Updates an annotation. The update must respect all mutability
+// restrictions and other invariants described on the annotation
+// resource. Caller must have WRITE permission for the associated
+// dataset.
+func (r *AnnotationsService) Update(annotationId string, annotation *Annotation) *AnnotationsUpdateCall {
+	c := &AnnotationsUpdateCall{s: r.s, opt_: make(map[string]interface{})}
+	c.annotationId = annotationId
+	c.annotation = annotation
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *AnnotationsUpdateCall) Fields(s ...googleapi.Field) *AnnotationsUpdateCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *AnnotationsUpdateCall) Do() (*Annotation, error) {
+	var body io.Reader = nil
+	body, err := googleapi.WithoutDataWrapper.JSONReader(c.annotation)
+	if err != nil {
+		return nil, err
+	}
+	ctype := "application/json"
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "annotations/{annotationId}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("PUT", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"annotationId": c.annotationId,
+	})
+	req.Header.Set("Content-Type", ctype)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *Annotation
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "description": "Updates an annotation. The update must respect all mutability restrictions and other invariants described on the annotation resource. Caller must have WRITE permission for the associated dataset.",
+	//   "httpMethod": "PUT",
+	//   "id": "genomics.annotations.update",
+	//   "parameterOrder": [
+	//     "annotationId"
+	//   ],
+	//   "parameters": {
+	//     "annotationId": {
+	//       "description": "The ID of the annotation to be updated.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "annotations/{annotationId}",
+	//   "request": {
+	//     "$ref": "Annotation"
+	//   },
+	//   "response": {
+	//     "$ref": "Annotation"
+	//   },
+	//   "scopes": [
+	//     "https://www.googleapis.com/auth/genomics"
+	//   ]
+	// }
+
+}
+
 // method id "genomics.callsets.create":
 
 type CallsetsCreateCall struct {
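As a usage sketch (not part of this change): the generated annotation calls above follow the builder shape used throughout this package — obtain a call from the service, optionally narrow the response with Fields, then invoke Do. Written as if inside this package; the Annotations/AnnotationSets accessors on *Service and the "id"/"name" field selectors are assumptions made for illustration:

    // getAnnotation fetches a single annotation, requesting a partial
    // response so the server returns only the selected fields.
    func getAnnotation(svc *Service, annotationID string) (*Annotation, error) {
        return svc.Annotations.Get(annotationID).Fields("id", "name").Do()
    }

    // searchAnnotationSets runs an annotation-set search; the request's
    // filter fields are defined elsewhere in this package.
    func searchAnnotationSets(svc *Service, req *SearchAnnotationSetsRequest) (*SearchAnnotationSetsResponse, error) {
        return svc.AnnotationSets.Search(req).Do()
    }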
diff --git a/gmail/v1/gmail-api.json b/gmail/v1/gmail-api.json
index 927b9b4..4cef877 100644
--- a/gmail/v1/gmail-api.json
+++ b/gmail/v1/gmail-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/bvP9PKeteRErWBIlbiucv-VU2no\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/B1g8jsdV_WYGpoHkWS2kMzxXR0s\"",
  "discoveryVersion": "v1",
  "id": "gmail:v1",
  "name": "gmail",
  "version": "v1",
- "revision": "20141204",
+ "revision": "20150120",
  "title": "Gmail API",
  "description": "The Gmail REST API.",
  "ownerDomain": "google.com",
diff --git a/gmail/v1/gmail-gen.go b/gmail/v1/gmail-gen.go
index 4f93bcc..2d35a62 100644
--- a/gmail/v1/gmail-gen.go
+++ b/gmail/v1/gmail-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "gmail:v1"
 const apiName = "gmail"
@@ -478,11 +480,15 @@
 // method id "gmail.users.drafts.create":
 
 type UsersDraftsCreateCall struct {
-	s      *Service
-	userId string
-	draft  *Draft
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	userId     string
+	draft      *Draft
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Create: Creates a new draft with the DRAFT label.
@@ -492,8 +498,32 @@
 	c.draft = draft
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *UsersDraftsCreateCall) Media(r io.Reader) *UsersDraftsCreateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *UsersDraftsCreateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersDraftsCreateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *UsersDraftsCreateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersDraftsCreateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
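A caller-side sketch of the single-chunk path introduced above (not part of this change), written as if inside this package; the svc value, the "me" user alias, and the empty Draft metadata are illustrative assumptions. ResumableMedia is the chunked alternative and, per the comments above, at most one of Media and ResumableMedia may be set on a call:

    // createDraft uploads the raw RFC 822 message as multipart media while
    // creating a draft for the authenticated user ("me" is the usual alias).
    // svc is assumed to be built around an OAuth2-authorized *http.Client.
    func createDraft(svc *Service, rfc822 io.Reader) (*Draft, error) {
        call := svc.Users.Drafts.Create("me", &Draft{})
        call.Media(rfc822) // streamed as one chunked multipart body; no Content-Length is computed
        return call.Do()
    }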
 
@@ -518,20 +548,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{userId}/drafts")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userId": c.userId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -541,6 +593,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Draft
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -911,11 +978,15 @@
 // method id "gmail.users.drafts.send":
 
 type UsersDraftsSendCall struct {
-	s      *Service
-	userId string
-	draft  *Draft
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	userId     string
+	draft      *Draft
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Send: Sends the specified, existing draft to the recipients in the
@@ -926,8 +997,32 @@
 	c.draft = draft
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *UsersDraftsSendCall) Media(r io.Reader) *UsersDraftsSendCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *UsersDraftsSendCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersDraftsSendCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *UsersDraftsSendCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersDraftsSendCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -952,20 +1047,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{userId}/drafts/send")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userId": c.userId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -975,6 +1092,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Message
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -1032,12 +1164,16 @@
 // method id "gmail.users.drafts.update":
 
 type UsersDraftsUpdateCall struct {
-	s      *Service
-	userId string
-	id     string
-	draft  *Draft
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	userId     string
+	id         string
+	draft      *Draft
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Update: Replaces a draft's content.
@@ -1048,8 +1184,32 @@
 	c.draft = draft
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *UsersDraftsUpdateCall) Media(r io.Reader) *UsersDraftsUpdateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *UsersDraftsUpdateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersDraftsUpdateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *UsersDraftsUpdateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersDraftsUpdateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -1074,21 +1234,43 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{userId}/drafts/{id}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("PUT", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userId": c.userId,
 		"id":     c.id,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -1098,6 +1280,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Draft
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -2067,11 +2264,15 @@
 // method id "gmail.users.messages.import":
 
 type UsersMessagesImportCall struct {
-	s       *Service
-	userId  string
-	message *Message
-	opt_    map[string]interface{}
-	media_  io.Reader
+	s          *Service
+	userId     string
+	message    *Message
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Import: Imports a message into only this user's mailbox, with
@@ -2090,8 +2291,32 @@
 	c.opt_["internalDateSource"] = internalDateSource
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *UsersMessagesImportCall) Media(r io.Reader) *UsersMessagesImportCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *UsersMessagesImportCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersMessagesImportCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *UsersMessagesImportCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersMessagesImportCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2119,20 +2344,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{userId}/messages/import")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userId": c.userId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2142,6 +2389,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Message
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -2212,11 +2474,15 @@
 // method id "gmail.users.messages.insert":
 
 type UsersMessagesInsertCall struct {
-	s       *Service
-	userId  string
-	message *Message
-	opt_    map[string]interface{}
-	media_  io.Reader
+	s          *Service
+	userId     string
+	message    *Message
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Directly inserts a message into only this user's mailbox
@@ -2235,8 +2501,32 @@
 	c.opt_["internalDateSource"] = internalDateSource
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *UsersMessagesInsertCall) Media(r io.Reader) *UsersMessagesInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *UsersMessagesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersMessagesInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *UsersMessagesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersMessagesInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2264,20 +2554,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{userId}/messages")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userId": c.userId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2287,6 +2599,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Message
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -2615,11 +2942,15 @@
 // method id "gmail.users.messages.send":
 
 type UsersMessagesSendCall struct {
-	s       *Service
-	userId  string
-	message *Message
-	opt_    map[string]interface{}
-	media_  io.Reader
+	s          *Service
+	userId     string
+	message    *Message
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Send: Sends the specified message to the recipients in the To, Cc,
@@ -2630,8 +2961,32 @@
 	c.message = message
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *UsersMessagesSendCall) Media(r io.Reader) *UsersMessagesSendCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *UsersMessagesSendCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *UsersMessagesSendCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *UsersMessagesSendCall) ProgressUpdater(pu googleapi.ProgressUpdater) *UsersMessagesSendCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2656,20 +3011,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{userId}/messages/send")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userId": c.userId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2679,6 +3056,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Message
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
diff --git a/google-api-go-generator/gen.go b/google-api-go-generator/gen.go
index 81f9005..ba24643 100644
--- a/google-api-go-generator/gen.go
+++ b/google-api-go-generator/gen.go
@@ -1328,10 +1328,11 @@
 			pn(`ctype := "application/json"`)
 			hasContentType = true
 		}
-		pn("var contentLength_ int64")
 		pn("var hasMedia_ bool")
 		pn(`if c.protocol_ != "resumable" {`)
-		pn(" contentLength_, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)")
+		pn(`  var cancel func()`)
+		pn("  cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)")
+		pn("  if cancel != nil { defer cancel() }")
 		pn("}")
 	}
 	pn("req, _ := http.NewRequest(%q, urls, body)", httpMethod)
@@ -1361,7 +1362,6 @@
 		pn(`  return %sfmt.Errorf("resumable uploads must set the Name parameter.")`, nilRet)
 		pn(" }")
 		pn("} else if hasMedia_ {")
-		pn(" req.ContentLength = contentLength_")
 		pn(` req.Header.Set("Content-Type", ctype)`)
 		pn("}")
 	} else if hasContentType {
diff --git a/googleapi/googleapi.go b/googleapi/googleapi.go
index 8e9eaf3..ec37eb6 100644
--- a/googleapi/googleapi.go
+++ b/googleapi/googleapi.go
@@ -9,6 +9,7 @@
 import (
 	"bytes"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -16,7 +17,6 @@
 	"net/http"
 	"net/textproto"
 	"net/url"
-	"os"
 	"regexp"
 	"strconv"
 	"strings"
@@ -199,38 +199,6 @@
 	return
 }
 
-func getReaderSize(r io.Reader) (io.Reader, int64) {
-	// Ideal case, the reader knows its own size.
-	if lr, ok := r.(Lengther); ok {
-		return r, int64(lr.Len())
-	}
-
-	// But maybe it's a seeker and we can seek to the end to find its size.
-	if s, ok := r.(io.Seeker); ok {
-		pos0, err := s.Seek(0, os.SEEK_CUR)
-		if err == nil {
-			posend, err := s.Seek(0, os.SEEK_END)
-			if err == nil {
-				_, err = s.Seek(pos0, os.SEEK_SET)
-				if err == nil {
-					return r, posend - pos0
-				} else {
-					// We moved it forward but can't restore it.
-					// Seems unlikely, but can't really restore now.
-					return endingWithErrorReader{strings.NewReader(""), err}, posend - pos0
-				}
-			}
-		}
-	}
-
-	// Otherwise we have to make a copy to calculate how big the reader is.
-	buf := new(bytes.Buffer)
-	// TODO(bradfitz): put a cap on this copy? spill to disk after
-	// a certain point?
-	_, err := io.Copy(buf, r)
-	return endingWithErrorReader{buf, err}, int64(buf.Len())
-}
-
 func typeHeader(contentType string) textproto.MIMEHeader {
 	h := make(textproto.MIMEHeader)
 	h.Set("Content-Type", contentType)
@@ -259,7 +227,7 @@
 // to the "multipart/related" content type, with random boundary.
 //
-// The return value is the content-length of the entire multpart body.
+// The returned cancel function, if non-nil, aborts streaming of the multipart body.
-func ConditionallyIncludeMedia(media io.Reader, bodyp *io.Reader, ctypep *string) (totalContentLength int64, ok bool) {
+func ConditionallyIncludeMedia(media io.Reader, bodyp *io.Reader, ctypep *string) (cancel func(), ok bool) {
 	if media == nil {
 		return
 	}
@@ -267,24 +235,9 @@
 	// different reader instance, so do the size check first,
 	// which looks at the specific type of the io.Reader.
 	var mediaType string
-	if typer, ok := media.(ContentTyper); ok {
-		mediaType = typer.ContentType()
-	}
-	media, mediaSize := getReaderSize(media)
-	if mediaType == "" {
-		media, mediaType = getMediaType(media)
-	}
-	body, bodyType := *bodyp, *ctypep
-	body, bodySize := getReaderSize(body)
+	media, mediaType = getMediaType(media)
 
-	// Calculate how big the the multipart will be.
-	{
-		totalContentLength = bodySize + mediaSize
-		mpw := multipart.NewWriter(countingWriter{&totalContentLength})
-		mpw.CreatePart(typeHeader(bodyType))
-		mpw.CreatePart(typeHeader(mediaType))
-		mpw.Close()
-	}
+	body, bodyType := *bodyp, *ctypep
 
 	pr, pw := io.Pipe()
 	mpw := multipart.NewWriter(pw)
@@ -312,9 +265,12 @@
 			return
 		}
 	}()
-	return totalContentLength, true
+	cancel = func() { pw.CloseWithError(errAborted) }
+	return cancel, true
 }
 
+var errAborted = errors.New("googleapi: upload aborted")
+
 // ProgressUpdater is a function that is called upon every progress update of a resumable upload.
 // This is the only part of a resumable upload (from googleapi) that is usable by the developer.
-// The remaining usable pieces of resumable uploads is exposed in each auto-generated API.
+// The remaining usable pieces of resumable uploads are exposed in each auto-generated API.
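The rewritten ConditionallyIncludeMedia above is the heart of the change: instead of measuring the body up front, it assembles the multipart payload through a pipe and lets net/http send it chunked. A standalone sketch of that pattern against the standard library only (the function name and the example URL are illustrative, not part of this package):

    package main

    import (
        "errors"
        "fmt"
        "io"
        "mime/multipart"
        "net/http"
        "net/textproto"
        "strings"
    )

    var errAborted = errors.New("upload aborted")

    // streamedMultipart stitches a JSON part and a media part into a single
    // multipart/related body that is produced on the fly through a pipe.
    // Nothing is buffered and no Content-Length is computed, so net/http
    // falls back to chunked transfer encoding for the request body.
    func streamedMultipart(jsonBody io.Reader, jsonType string, media io.Reader, mediaType string) (body io.Reader, ctype string, cancel func()) {
        pr, pw := io.Pipe()
        mpw := multipart.NewWriter(pw)
        go func() {
            part, err := mpw.CreatePart(typeHeader(jsonType))
            if err == nil {
                _, err = io.Copy(part, jsonBody)
            }
            if err == nil {
                part, err = mpw.CreatePart(typeHeader(mediaType))
            }
            if err == nil {
                _, err = io.Copy(part, media)
            }
            if err == nil {
                err = mpw.Close()
            }
            // Propagate success (io.EOF to the reader) or the first failure.
            pw.CloseWithError(err)
        }()
        ctype = "multipart/related; boundary=" + mpw.Boundary()
        cancel = func() { pw.CloseWithError(errAborted) }
        return pr, ctype, cancel
    }

    func typeHeader(contentType string) textproto.MIMEHeader {
        h := make(textproto.MIMEHeader)
        h.Set("Content-Type", contentType)
        return h
    }

    func main() {
        body, ctype, cancel := streamedMultipart(
            strings.NewReader(`{"title":"example"}`), "application/json",
            strings.NewReader("...media bytes..."), "image/png",
        )
        defer cancel()

        // A pipe reader has no known length, so ContentLength stays 0 and the
        // client would send the body with Transfer-Encoding: chunked.
        req, err := http.NewRequest("POST", "https://www.googleapis.com/upload/example?uploadType=multipart", body)
        if err != nil {
            panic(err)
        }
        req.Header.Set("Content-Type", ctype)
        fmt.Println("content length:", req.ContentLength)
    }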
diff --git a/groupsmigration/v1/groupsmigration-gen.go b/groupsmigration/v1/groupsmigration-gen.go
index bc4e6fd..347a275 100644
--- a/groupsmigration/v1/groupsmigration-gen.go
+++ b/groupsmigration/v1/groupsmigration-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "groupsmigration:v1"
 const apiName = "groupsmigration"
@@ -75,10 +77,14 @@
 // method id "groupsmigration.archive.insert":
 
 type ArchiveInsertCall struct {
-	s       *Service
-	groupId string
-	opt_    map[string]interface{}
-	media_  io.Reader
+	s          *Service
+	groupId    string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Inserts a new mail into the archive of the Google group.
@@ -87,8 +93,32 @@
 	c.groupId = groupId
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ArchiveInsertCall) Media(r io.Reader) *ArchiveInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ArchiveInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ArchiveInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ArchiveInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ArchiveInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -108,22 +138,44 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "{groupId}/archive")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"groupId": c.groupId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -133,6 +185,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Groups
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
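For orientation, the resumable branch added to Do above reduces to the following handoff to the googleapi package. This is a sketch under two assumptions: a session URI has already been obtained from the Location header of the initiating request (as the generated code does), and googleapi.ProgressUpdater has the signature func(current, total int64):

    import (
        "io"
        "log"
        "net/http"

        "golang.org/x/net/context"
        "google.golang.org/api/googleapi"
    )

    // resumeUpload mirrors what the generated Do methods in this change do
    // after reading the Location header: it streams r (size bytes of the
    // given MIME type) to the already-created upload session in chunks and
    // reports progress after each one.
    func resumeUpload(ctx context.Context, client *http.Client, sessionURI string, r io.ReaderAt, size int64, mimeType string) (*http.Response, error) {
        rx := &googleapi.ResumableUpload{
            Client:        client,
            URI:           sessionURI,
            Media:         io.NewSectionReader(r, 0, size), // satisfies googleapi.SizeReaderAt
            MediaType:     mimeType,
            ContentLength: size,
            Callback: func(current, total int64) {
                log.Printf("uploaded %d of %d bytes", current, total)
            },
        }
        return rx.Upload(ctx)
    }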
diff --git a/groupssettings/v1/groupssettings-gen.go b/groupssettings/v1/groupssettings-gen.go
index 1a74633..94856f7 100644
--- a/groupssettings/v1/groupssettings-gen.go
+++ b/groupssettings/v1/groupssettings-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "groupssettings:v1"
 const apiName = "groupssettings"
diff --git a/identitytoolkit/v3/identitytoolkit-gen.go b/identitytoolkit/v3/identitytoolkit-gen.go
index 7861fdb..6037d61 100644
--- a/identitytoolkit/v3/identitytoolkit-gen.go
+++ b/identitytoolkit/v3/identitytoolkit-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "identitytoolkit:v3"
 const apiName = "identitytoolkit"
diff --git a/licensing/v1/licensing-gen.go b/licensing/v1/licensing-gen.go
index 23a150a..0c788c6 100644
--- a/licensing/v1/licensing-gen.go
+++ b/licensing/v1/licensing-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "licensing:v1"
 const apiName = "licensing"
diff --git a/manager/v1beta2/manager-gen.go b/manager/v1beta2/manager-gen.go
index c4d7785..dae92af 100644
--- a/manager/v1beta2/manager-gen.go
+++ b/manager/v1beta2/manager-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "manager:v1beta2"
 const apiName = "manager"
diff --git a/mapsengine/exp2/mapsengine-api.json b/mapsengine/exp2/mapsengine-api.json
index 5678d2a..d0f5b0b 100644
--- a/mapsengine/exp2/mapsengine-api.json
+++ b/mapsengine/exp2/mapsengine-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/XDr7bmgk6_TzpUGFqVmOQUHb1wI\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/g5gYKe1RsGYlNs6bEqweIHyXDHA\"",
  "discoveryVersion": "v1",
  "id": "mapsengine:exp2",
  "name": "mapsengine",
  "canonicalName": "Maps Engine",
  "version": "exp2",
- "revision": "20141126",
+ "revision": "20150120",
  "title": "Google Maps Engine API",
  "description": "The Google Maps Engine API allows developers to store and query geospatial vector and raster data.",
  "ownerDomain": "google.com",
diff --git a/mapsengine/exp2/mapsengine-gen.go b/mapsengine/exp2/mapsengine-gen.go
index 7e50b06..f488471 100644
--- a/mapsengine/exp2/mapsengine-gen.go
+++ b/mapsengine/exp2/mapsengine-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "mapsengine:exp2"
 const apiName = "mapsengine"
@@ -5082,11 +5084,15 @@
 // method id "mapsengine.projects.icons.create":
 
 type ProjectsIconsCreateCall struct {
-	s         *Service
-	projectId string
-	icon      *Icon
-	opt_      map[string]interface{}
-	media_    io.Reader
+	s          *Service
+	projectId  string
+	icon       *Icon
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Create: Create an icon.
@@ -5096,8 +5102,32 @@
 	c.icon = icon
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ProjectsIconsCreateCall) Media(r io.Reader) *ProjectsIconsCreateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ProjectsIconsCreateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ProjectsIconsCreateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ProjectsIconsCreateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ProjectsIconsCreateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
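
From the caller's side, the three methods added here compose with the existing builder: Media keeps the old single-request behaviour (now sent chunked, with no Content-Length sniffing), while ResumableMedia plus ProgressUpdater switch the call to the chunked, cancellable protocol. A sketch under the usual generated-package layout; the mapsengine.New-style constructor, the svc.Projects.Icons.Create entry point, the google.golang.org/api/mapsengine/exp2 import path, and the func(current, total int64) callback shape are assumptions, everything else comes from the methods above:

	import (
		"log"
		"os"

		"golang.org/x/net/context"
		mapsengine "google.golang.org/api/mapsengine/exp2"
	)

	func createIcon(ctx context.Context, svc *mapsengine.Service, projectID, path string) (*mapsengine.Icon, error) {
		f, err := os.Open(path)
		if err != nil {
			return nil, err
		}
		defer f.Close()
		fi, err := f.Stat()
		if err != nil {
			return nil, err
		}
		// At most one of Media and ResumableMedia may be set; passing "" for
		// mediaType lets DetectMediaType sniff it instead.
		return svc.Projects.Icons.Create(projectID, &mapsengine.Icon{}).
			ResumableMedia(ctx, f, fi.Size(), "image/png").
			ProgressUpdater(func(current, total int64) {
				log.Printf("icon upload: %d/%d bytes", current, total)
			}).
			Do()
	}

The multipart path is the one-line variant: replace the ResumableMedia/ProgressUpdater pair with .Media(f) and drop ctx.
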
 
@@ -5122,20 +5152,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/icons")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"projectId": c.projectId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -5145,6 +5197,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Icon
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -7631,11 +7698,15 @@
 // method id "mapsengine.rasters.files.insert":
 
 type RastersFilesInsertCall struct {
-	s        *Service
-	id       string
-	filename string
-	opt_     map[string]interface{}
-	media_   io.Reader
+	s          *Service
+	id         string
+	filename   string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Upload a file to a raster asset.
@@ -7645,8 +7716,32 @@
 	c.filename = filename
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *RastersFilesInsertCall) Media(r io.Reader) *RastersFilesInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *RastersFilesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *RastersFilesInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *RastersFilesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *RastersFilesInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -7667,22 +7762,44 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "rasters/{id}/files")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"id": c.id,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -7692,6 +7809,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return err
+		}
+	}
 	return nil
 	// {
 	//   "description": "Upload a file to a raster asset.",
@@ -9518,11 +9650,15 @@
 // method id "mapsengine.tables.files.insert":
 
 type TablesFilesInsertCall struct {
-	s        *Service
-	id       string
-	filename string
-	opt_     map[string]interface{}
-	media_   io.Reader
+	s          *Service
+	id         string
+	filename   string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Upload a file to a placeholder table asset. See Table Upload
@@ -9536,8 +9672,32 @@
 	c.filename = filename
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *TablesFilesInsertCall) Media(r io.Reader) *TablesFilesInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *TablesFilesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *TablesFilesInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *TablesFilesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *TablesFilesInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -9558,22 +9718,44 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "tables/{id}/files")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"id": c.id,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -9583,6 +9765,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return err
+		}
+	}
 	return nil
 	// {
 	//   "description": "Upload a file to a placeholder table asset. See Table Upload in the Developer's Guide for more information.\nSupported file types are listed in the Supported data formats and limits article of the Google Maps Engine help center.",
diff --git a/mapsengine/v1/mapsengine-api.json b/mapsengine/v1/mapsengine-api.json
index 2acd87a..864791d 100644
--- a/mapsengine/v1/mapsengine-api.json
+++ b/mapsengine/v1/mapsengine-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/yYnxzo1XQvFrShjfnboGeo8rHnM\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/xRQmfo4WYHt7ESOziu0ToVNCLZc\"",
  "discoveryVersion": "v1",
  "id": "mapsengine:v1",
  "name": "mapsengine",
  "canonicalName": "Maps Engine",
  "version": "v1",
- "revision": "20141126",
+ "revision": "20150120",
  "title": "Google Maps Engine API",
  "description": "The Google Maps Engine API allows developers to store and query geospatial vector and raster data.",
  "ownerDomain": "google.com",
diff --git a/mapsengine/v1/mapsengine-gen.go b/mapsengine/v1/mapsengine-gen.go
index 431c4bf..c1d7554 100644
--- a/mapsengine/v1/mapsengine-gen.go
+++ b/mapsengine/v1/mapsengine-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "mapsengine:v1"
 const apiName = "mapsengine"
@@ -5023,11 +5025,15 @@
 // method id "mapsengine.projects.icons.create":
 
 type ProjectsIconsCreateCall struct {
-	s         *Service
-	projectId string
-	icon      *Icon
-	opt_      map[string]interface{}
-	media_    io.Reader
+	s          *Service
+	projectId  string
+	icon       *Icon
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Create: Create an icon.
@@ -5037,8 +5043,32 @@
 	c.icon = icon
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ProjectsIconsCreateCall) Media(r io.Reader) *ProjectsIconsCreateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ProjectsIconsCreateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ProjectsIconsCreateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ProjectsIconsCreateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ProjectsIconsCreateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -5063,20 +5093,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "projects/{projectId}/icons")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"projectId": c.projectId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -5086,6 +5138,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Icon
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -7572,11 +7639,15 @@
 // method id "mapsengine.rasters.files.insert":
 
 type RastersFilesInsertCall struct {
-	s        *Service
-	id       string
-	filename string
-	opt_     map[string]interface{}
-	media_   io.Reader
+	s          *Service
+	id         string
+	filename   string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Upload a file to a raster asset.
@@ -7586,8 +7657,32 @@
 	c.filename = filename
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *RastersFilesInsertCall) Media(r io.Reader) *RastersFilesInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *RastersFilesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *RastersFilesInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *RastersFilesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *RastersFilesInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -7608,22 +7703,44 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "rasters/{id}/files")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"id": c.id,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -7633,6 +7750,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return err
+		}
+	}
 	return nil
 	// {
 	//   "description": "Upload a file to a raster asset.",
@@ -9459,11 +9591,15 @@
 // method id "mapsengine.tables.files.insert":
 
 type TablesFilesInsertCall struct {
-	s        *Service
-	id       string
-	filename string
-	opt_     map[string]interface{}
-	media_   io.Reader
+	s          *Service
+	id         string
+	filename   string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Upload a file to a placeholder table asset. See Table Upload
@@ -9477,8 +9613,32 @@
 	c.filename = filename
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *TablesFilesInsertCall) Media(r io.Reader) *TablesFilesInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *TablesFilesInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *TablesFilesInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *TablesFilesInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *TablesFilesInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -9499,22 +9659,44 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "tables/{id}/files")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"id": c.id,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -9524,6 +9706,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return err
+		}
+	}
 	return nil
 	// {
 	//   "description": "Upload a file to a placeholder table asset. See Table Upload in the Developer's Guide for more information.\nSupported file types are listed in the Supported data formats and limits article of the Google Maps Engine help center.",
diff --git a/mirror/v1/mirror-gen.go b/mirror/v1/mirror-gen.go
index 72ecd64..f83afbd 100644
--- a/mirror/v1/mirror-gen.go
+++ b/mirror/v1/mirror-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "mirror:v1"
 const apiName = "mirror"
@@ -1950,6 +1952,10 @@
 	timelineitem *TimelineItem
 	opt_         map[string]interface{}
 	media_       io.Reader
+	resumable_   googleapi.SizeReaderAt
+	mediaType_   string
+	ctx_         context.Context
+	protocol_    string
 }
 
 // Insert: Inserts a new item into the timeline.
@@ -1958,8 +1964,32 @@
 	c.timelineitem = timelineitem
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *TimelineInsertCall) Media(r io.Reader) *TimelineInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *TimelineInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *TimelineInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *TimelineInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *TimelineInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -1984,18 +2014,40 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "timeline")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.SetOpaque(req.URL)
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2005,6 +2057,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *TimelineItem
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -2323,6 +2390,10 @@
 	timelineitem *TimelineItem
 	opt_         map[string]interface{}
 	media_       io.Reader
+	resumable_   googleapi.SizeReaderAt
+	mediaType_   string
+	ctx_         context.Context
+	protocol_    string
 }
 
 // Update: Updates a timeline item in place.
@@ -2332,8 +2403,32 @@
 	c.timelineitem = timelineitem
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *TimelineUpdateCall) Media(r io.Reader) *TimelineUpdateCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *TimelineUpdateCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *TimelineUpdateCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *TimelineUpdateCall) ProgressUpdater(pu googleapi.ProgressUpdater) *TimelineUpdateCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2358,20 +2453,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "timeline/{id}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("PUT", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"id": c.id,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2381,6 +2498,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *TimelineItem
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -2605,10 +2737,14 @@
 // method id "mirror.timeline.attachments.insert":
 
 type TimelineAttachmentsInsertCall struct {
-	s      *Service
-	itemId string
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	itemId     string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Adds a new attachment to a timeline item.
@@ -2617,8 +2753,32 @@
 	c.itemId = itemId
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *TimelineAttachmentsInsertCall) Media(r io.Reader) *TimelineAttachmentsInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *TimelineAttachmentsInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *TimelineAttachmentsInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *TimelineAttachmentsInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *TimelineAttachmentsInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2638,22 +2798,44 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "timeline/{itemId}/attachments")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"itemId": c.itemId,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2663,6 +2845,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Attachment
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
diff --git a/oauth2/v1/oauth2-api.json b/oauth2/v1/oauth2-api.json
index 66c4226..49e6a57 100644
--- a/oauth2/v1/oauth2-api.json
+++ b/oauth2/v1/oauth2-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/YHYuOKmsUcxnyDdYX9TyTF0HnOg\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/9Yyv0tXok5k5vzXMjMvbTiT4GAM\"",
  "discoveryVersion": "v1",
  "id": "oauth2:v1",
  "name": "oauth2",
  "version": "v1",
- "revision": "20150106",
+ "revision": "20150120",
  "title": "Google OAuth2 API",
  "description": "Lets you access OAuth2 protocol related APIs.",
  "ownerDomain": "google.com",
@@ -85,6 +85,33 @@
   }
  },
  "schemas": {
+  "Raw": {
+   "id": "Raw",
+   "type": "object",
+   "properties": {
+    "keyvalues": {
+     "type": "array",
+     "items": {
+      "type": "object",
+      "properties": {
+       "algorithm": {
+        "type": "string",
+        "default": "RSA"
+       },
+       "exponent": {
+        "type": "string"
+       },
+       "keyid": {
+        "type": "string"
+       },
+       "modulus": {
+        "type": "string"
+       }
+      }
+     }
+    }
+   }
+  },
   "Tokeninfo": {
    "id": "Tokeninfo",
    "type": "object",
@@ -191,9 +218,70 @@
      "default": "true"
     }
    }
+  },
+  "X509": {
+   "id": "X509",
+   "type": "object",
+   "additionalProperties": {
+    "type": "string"
+   }
   }
  },
  "methods": {
+  "getCertForOpenIdConnect": {
+   "id": "oauth2.getCertForOpenIdConnect",
+   "path": "oauth2/v1/certs",
+   "httpMethod": "GET",
+   "response": {
+    "$ref": "X509"
+   }
+  },
+  "getCertForOpenIdConnectRaw": {
+   "id": "oauth2.getCertForOpenIdConnectRaw",
+   "path": "oauth2/v1/raw_public_keys",
+   "httpMethod": "GET",
+   "response": {
+    "$ref": "Raw"
+   }
+  },
+  "getRobotMetadataRaw": {
+   "id": "oauth2.getRobotMetadataRaw",
+   "path": "service_accounts/v1/metadata/raw/{robotEmail}",
+   "httpMethod": "GET",
+   "parameters": {
+    "robotEmail": {
+     "type": "string",
+     "description": "The email of robot account.",
+     "required": true,
+     "location": "path"
+    }
+   },
+   "parameterOrder": [
+    "robotEmail"
+   ],
+   "response": {
+    "$ref": "Raw"
+   }
+  },
+  "getRobotMetadataX509": {
+   "id": "oauth2.getRobotMetadataX509",
+   "path": "service_accounts/v1/metadata/x509/{robotEmail}",
+   "httpMethod": "GET",
+   "parameters": {
+    "robotEmail": {
+     "type": "string",
+     "description": "The email of robot account.",
+     "required": true,
+     "location": "path"
+    }
+   },
+   "parameterOrder": [
+    "robotEmail"
+   ],
+   "response": {
+    "$ref": "X509"
+   }
+  },
   "tokeninfo": {
    "id": "oauth2.tokeninfo",
    "path": "oauth2/v1/tokeninfo",
diff --git a/oauth2/v1/oauth2-gen.go b/oauth2/v1/oauth2-gen.go
index 043cf90..f78798b 100644
--- a/oauth2/v1/oauth2-gen.go
+++ b/oauth2/v1/oauth2-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "oauth2:v1"
 const apiName = "oauth2"
@@ -103,6 +105,20 @@
 	s *Service
 }
 
+type Raw struct {
+	Keyvalues []*RawKeyvalues `json:"keyvalues,omitempty"`
+}
+
+type RawKeyvalues struct {
+	Algorithm string `json:"algorithm,omitempty"`
+
+	Exponent string `json:"exponent,omitempty"`
+
+	Keyid string `json:"keyid,omitempty"`
+
+	Modulus string `json:"modulus,omitempty"`
+}
+
 type Tokeninfo struct {
 	// Access_type: The access type granted with this token. It can be
 	// offline or online.
@@ -186,6 +202,264 @@
 	Verified_email bool `json:"verified_email,omitempty"`
 }
 
+// method id "oauth2.getCertForOpenIdConnect":
+
+type GetCertForOpenIdConnectCall struct {
+	s    *Service
+	opt_ map[string]interface{}
+}
+
+// GetCertForOpenIdConnect:
+func (s *Service) GetCertForOpenIdConnect() *GetCertForOpenIdConnectCall {
+	c := &GetCertForOpenIdConnectCall{s: s, opt_: make(map[string]interface{})}
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *GetCertForOpenIdConnectCall) Fields(s ...googleapi.Field) *GetCertForOpenIdConnectCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *GetCertForOpenIdConnectCall) Do() (map[string]string, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "oauth2/v1/certs")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret map[string]string
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "httpMethod": "GET",
+	//   "id": "oauth2.getCertForOpenIdConnect",
+	//   "path": "oauth2/v1/certs",
+	//   "response": {
+	//     "$ref": "X509"
+	//   }
+	// }
+
+}
+
+// method id "oauth2.getCertForOpenIdConnectRaw":
+
+type GetCertForOpenIdConnectRawCall struct {
+	s    *Service
+	opt_ map[string]interface{}
+}
+
+// GetCertForOpenIdConnectRaw:
+func (s *Service) GetCertForOpenIdConnectRaw() *GetCertForOpenIdConnectRawCall {
+	c := &GetCertForOpenIdConnectRawCall{s: s, opt_: make(map[string]interface{})}
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *GetCertForOpenIdConnectRawCall) Fields(s ...googleapi.Field) *GetCertForOpenIdConnectRawCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *GetCertForOpenIdConnectRawCall) Do() (*Raw, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "oauth2/v1/raw_public_keys")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *Raw
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "httpMethod": "GET",
+	//   "id": "oauth2.getCertForOpenIdConnectRaw",
+	//   "path": "oauth2/v1/raw_public_keys",
+	//   "response": {
+	//     "$ref": "Raw"
+	//   }
+	// }
+
+}
+
+// method id "oauth2.getRobotMetadataRaw":
+
+type GetRobotMetadataRawCall struct {
+	s          *Service
+	robotEmail string
+	opt_       map[string]interface{}
+}
+
+// GetRobotMetadataRaw:
+func (s *Service) GetRobotMetadataRaw(robotEmail string) *GetRobotMetadataRawCall {
+	c := &GetRobotMetadataRawCall{s: s, opt_: make(map[string]interface{})}
+	c.robotEmail = robotEmail
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *GetRobotMetadataRawCall) Fields(s ...googleapi.Field) *GetRobotMetadataRawCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *GetRobotMetadataRawCall) Do() (*Raw, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "service_accounts/v1/metadata/raw/{robotEmail}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"robotEmail": c.robotEmail,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *Raw
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "httpMethod": "GET",
+	//   "id": "oauth2.getRobotMetadataRaw",
+	//   "parameterOrder": [
+	//     "robotEmail"
+	//   ],
+	//   "parameters": {
+	//     "robotEmail": {
+	//       "description": "The email of robot account.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "service_accounts/v1/metadata/raw/{robotEmail}",
+	//   "response": {
+	//     "$ref": "Raw"
+	//   }
+	// }
+
+}
+
+// method id "oauth2.getRobotMetadataX509":
+
+type GetRobotMetadataX509Call struct {
+	s          *Service
+	robotEmail string
+	opt_       map[string]interface{}
+}
+
+// GetRobotMetadataX509:
+func (s *Service) GetRobotMetadataX509(robotEmail string) *GetRobotMetadataX509Call {
+	c := &GetRobotMetadataX509Call{s: s, opt_: make(map[string]interface{})}
+	c.robotEmail = robotEmail
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *GetRobotMetadataX509Call) Fields(s ...googleapi.Field) *GetRobotMetadataX509Call {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *GetRobotMetadataX509Call) Do() (map[string]string, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "service_accounts/v1/metadata/x509/{robotEmail}")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.Expand(req.URL, map[string]string{
+		"robotEmail": c.robotEmail,
+	})
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret map[string]string
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "httpMethod": "GET",
+	//   "id": "oauth2.getRobotMetadataX509",
+	//   "parameterOrder": [
+	//     "robotEmail"
+	//   ],
+	//   "parameters": {
+	//     "robotEmail": {
+	//       "description": "The email of robot account.",
+	//       "location": "path",
+	//       "required": true,
+	//       "type": "string"
+	//     }
+	//   },
+	//   "path": "service_accounts/v1/metadata/x509/{robotEmail}",
+	//   "response": {
+	//     "$ref": "X509"
+	//   }
+	// }
+
+}
+
 // method id "oauth2.tokeninfo":
 
 type TokeninfoCall struct {
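
The four new oauth2/v1 methods are plain GET calls with no setup beyond an HTTP client. A short sketch of fetching Google's signing keys in both shapes; the oauth2.New constructor and the google.golang.org/api/oauth2/v1 import path are the standard generated ones and are assumed here, everything else is defined in this hunk:

	import (
		"fmt"
		"net/http"

		oauth2 "google.golang.org/api/oauth2/v1"
	)

	func dumpGoogleCerts(client *http.Client) error {
		svc, err := oauth2.New(client)
		if err != nil {
			return err
		}

		// X.509 certificates, returned as an arbitrary key-id -> PEM map (the
		// X509 schema is just additionalProperties of type string).
		certs, err := svc.GetCertForOpenIdConnect().Do()
		if err != nil {
			return err
		}
		for kid, pem := range certs {
			fmt.Printf("x509 %s: %d bytes\n", kid, len(pem))
		}

		// The same keys in raw RSA form (modulus/exponent per key id).
		raw, err := svc.GetCertForOpenIdConnectRaw().Do()
		if err != nil {
			return err
		}
		for _, kv := range raw.Keyvalues {
			fmt.Printf("raw %s: alg=%s exp=%s\n", kv.Keyid, kv.Algorithm, kv.Exponent)
		}
		return nil
	}

GetRobotMetadataRaw and GetRobotMetadataX509 follow the same pattern, with the service-account email as the single path parameter.
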
diff --git a/oauth2/v2/oauth2-api.json b/oauth2/v2/oauth2-api.json
index 06a1710..e6b6d5c 100644
--- a/oauth2/v2/oauth2-api.json
+++ b/oauth2/v2/oauth2-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/rUypcW3W8YGLTpyNoUxcj8xEnns\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/x7FThRC1OZmHwbkYbDOWKsWuUA8\"",
  "discoveryVersion": "v1",
  "id": "oauth2:v2",
  "name": "oauth2",
  "version": "v2",
- "revision": "20150106",
+ "revision": "20150120",
  "title": "Google OAuth2 API",
  "description": "Lets you access OAuth2 protocol related APIs.",
  "ownerDomain": "google.com",
@@ -85,6 +85,41 @@
   }
  },
  "schemas": {
+  "Jwk": {
+   "id": "Jwk",
+   "type": "object",
+   "properties": {
+    "keys": {
+     "type": "array",
+     "items": {
+      "type": "object",
+      "properties": {
+       "alg": {
+        "type": "string",
+        "default": "RS256"
+       },
+       "e": {
+        "type": "string"
+       },
+       "kid": {
+        "type": "string"
+       },
+       "kty": {
+        "type": "string",
+        "default": "RSA"
+       },
+       "n": {
+        "type": "string"
+       },
+       "use": {
+        "type": "string",
+        "default": "sig"
+       }
+      }
+     }
+    }
+   }
+  },
   "Tokeninfo": {
    "id": "Tokeninfo",
    "type": "object",
@@ -177,6 +212,14 @@
   }
  },
  "methods": {
+  "getCertForOpenIdConnect": {
+   "id": "oauth2.getCertForOpenIdConnect",
+   "path": "oauth2/v2/certs",
+   "httpMethod": "GET",
+   "response": {
+    "$ref": "Jwk"
+   }
+  },
   "tokeninfo": {
    "id": "oauth2.tokeninfo",
    "path": "oauth2/v2/tokeninfo",
diff --git a/oauth2/v2/oauth2-gen.go b/oauth2/v2/oauth2-gen.go
index f4408c2..9ad0b6f 100644
--- a/oauth2/v2/oauth2-gen.go
+++ b/oauth2/v2/oauth2-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "oauth2:v2"
 const apiName = "oauth2"
@@ -103,6 +105,24 @@
 	s *Service
 }
 
+type Jwk struct {
+	Keys []*JwkKeys `json:"keys,omitempty"`
+}
+
+type JwkKeys struct {
+	Alg string `json:"alg,omitempty"`
+
+	E string `json:"e,omitempty"`
+
+	Kid string `json:"kid,omitempty"`
+
+	Kty string `json:"kty,omitempty"`
+
+	N string `json:"n,omitempty"`
+
+	Use string `json:"use,omitempty"`
+}
+
 type Tokeninfo struct {
 	// Access_type: The access type granted with this token. It can be
 	// offline or online.
@@ -173,6 +193,63 @@
 	Verified_email bool `json:"verified_email,omitempty"`
 }
 
+// method id "oauth2.getCertForOpenIdConnect":
+
+type GetCertForOpenIdConnectCall struct {
+	s    *Service
+	opt_ map[string]interface{}
+}
+
+// GetCertForOpenIdConnect:
+func (s *Service) GetCertForOpenIdConnect() *GetCertForOpenIdConnectCall {
+	c := &GetCertForOpenIdConnectCall{s: s, opt_: make(map[string]interface{})}
+	return c
+}
+
+// Fields allows partial responses to be retrieved.
+// See https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
+// for more information.
+func (c *GetCertForOpenIdConnectCall) Fields(s ...googleapi.Field) *GetCertForOpenIdConnectCall {
+	c.opt_["fields"] = googleapi.CombineFields(s)
+	return c
+}
+
+func (c *GetCertForOpenIdConnectCall) Do() (*Jwk, error) {
+	var body io.Reader = nil
+	params := make(url.Values)
+	params.Set("alt", "json")
+	if v, ok := c.opt_["fields"]; ok {
+		params.Set("fields", fmt.Sprintf("%v", v))
+	}
+	urls := googleapi.ResolveRelative(c.s.BasePath, "oauth2/v2/certs")
+	urls += "?" + params.Encode()
+	req, _ := http.NewRequest("GET", urls, body)
+	googleapi.SetOpaque(req.URL)
+	req.Header.Set("User-Agent", "google-api-go-client/0.5")
+	res, err := c.s.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer googleapi.CloseBody(res)
+	if err := googleapi.CheckResponse(res); err != nil {
+		return nil, err
+	}
+	var ret *Jwk
+	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
+		return nil, err
+	}
+	return ret, nil
+	// {
+	//   "httpMethod": "GET",
+	//   "id": "oauth2.getCertForOpenIdConnect",
+	//   "path": "oauth2/v2/certs",
+	//   "response": {
+	//     "$ref": "Jwk"
+	//   }
+	// }
+
+}
+
 // method id "oauth2.tokeninfo":
 
 type TokeninfoCall struct {
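
The v2 variant returns the keys as a typed JWK set rather than string maps. A minimal sketch, with the same assumptions about the generated constructor and import path as the v1 example above:

	import (
		oauth2 "google.golang.org/api/oauth2/v2"
	)

	func signingKeyIDs(svc *oauth2.Service) ([]string, error) {
		jwk, err := svc.GetCertForOpenIdConnect().Do()
		if err != nil {
			return nil, err
		}
		var kids []string
		for _, k := range jwk.Keys {
			// Schema defaults: Kty "RSA", Alg "RS256", Use "sig"; N and E carry
			// the base64url-encoded modulus and exponent (standard JWK layout).
			kids = append(kids, k.Kid)
		}
		return kids, nil
	}
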
diff --git a/pagespeedonline/v1/pagespeedonline-gen.go b/pagespeedonline/v1/pagespeedonline-gen.go
index 0a57745..ef34aa4 100644
--- a/pagespeedonline/v1/pagespeedonline-gen.go
+++ b/pagespeedonline/v1/pagespeedonline-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "pagespeedonline:v1"
 const apiName = "pagespeedonline"
diff --git a/pagespeedonline/v2/pagespeedonline-gen.go b/pagespeedonline/v2/pagespeedonline-gen.go
index 6406ed0..e2eed29 100644
--- a/pagespeedonline/v2/pagespeedonline-gen.go
+++ b/pagespeedonline/v2/pagespeedonline-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "pagespeedonline:v2"
 const apiName = "pagespeedonline"
diff --git a/plus/v1/plus-api.json b/plus/v1/plus-api.json
index 80b2812..6619668 100644
--- a/plus/v1/plus-api.json
+++ b/plus/v1/plus-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/9M-Q9toWdNdH7Kb8ozAS-goffGE\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/yIlgMiAhuGzU5ET3NtXZn6hNffI\"",
  "discoveryVersion": "v1",
  "id": "plus:v1",
  "name": "plus",
  "version": "v1",
- "revision": "20141218",
+ "revision": "20150105",
  "title": "Google+ API",
  "description": "The Google+ API enables developers to build on top of the Google+ platform.",
  "ownerDomain": "google.com",
diff --git a/plus/v1/plus-gen.go b/plus/v1/plus-gen.go
index 4992639..876f6f1 100644
--- a/plus/v1/plus-gen.go
+++ b/plus/v1/plus-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "plus:v1"
 const apiName = "plus"
diff --git a/plusdomains/v1/plusdomains-api.json b/plusdomains/v1/plusdomains-api.json
index ed80ebf..a14c6b6 100644
--- a/plusdomains/v1/plusdomains-api.json
+++ b/plusdomains/v1/plusdomains-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/4SkIij8adsUiDnWMXdMYi3Ud5JI\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Dp57l-pfCxeHpBW5IqweXjCaYQ4\"",
  "discoveryVersion": "v1",
  "id": "plusDomains:v1",
  "name": "plusDomains",
  "version": "v1",
- "revision": "20141218",
+ "revision": "20150105",
  "title": "Google+ Domains API",
  "description": "The Google+ API enables developers to build on top of the Google+ platform.",
  "ownerDomain": "google.com",
diff --git a/plusdomains/v1/plusdomains-gen.go b/plusdomains/v1/plusdomains-gen.go
index 178fa59..7a22faf 100644
--- a/plusdomains/v1/plusdomains-gen.go
+++ b/plusdomains/v1/plusdomains-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "plusDomains:v1"
 const apiName = "plusDomains"
@@ -2718,6 +2720,10 @@
 	media      *Media
 	opt_       map[string]interface{}
 	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Add a new media item to an album. The current upload size
@@ -2731,8 +2737,32 @@
 	c.media = media
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *MediaInsertCall) Media(r io.Reader) *MediaInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *MediaInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *MediaInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *MediaInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *MediaInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2757,21 +2787,43 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "people/{userId}/media/{collection}")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"userId":     c.userId,
 		"collection": c.collection,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2781,6 +2833,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Media
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
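
With size sniffing gone, the single-chunk Media path streams the request
body as chunked multipart/mime, so any io.Reader works without a known
length. A minimal sketch, assuming an OAuth2-authorized *http.Client,
that "cloud" is the accepted collection value, and that DisplayName is a
valid Media field:

	package example

	import (
		"log"
		"net/http"
		"os"

		"google.golang.org/api/plusdomains/v1"
	)

	// insertMedia uploads one photo for the authenticated user in a single
	// multipart request. authClient is assumed to be OAuth2-authorized.
	func insertMedia(authClient *http.Client) {
		svc, err := plusdomains.New(authClient)
		if err != nil {
			log.Fatal(err)
		}
		f, err := os.Open("photo.jpg")
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()
		// Media(f) selects the multipart protocol; the body is streamed, not slurped.
		_, err = svc.Media.Insert("me", "cloud", &plusdomains.Media{DisplayName: "photo.jpg"}).
			Media(f).
			Do()
		if err != nil {
			log.Fatal(err)
		}
		log.Println("upload complete")
	}
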
diff --git a/prediction/v1.2/prediction-gen.go b/prediction/v1.2/prediction-gen.go
index 1d0cfcf..0e4c0ad 100644
--- a/prediction/v1.2/prediction-gen.go
+++ b/prediction/v1.2/prediction-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "prediction:v1.2"
 const apiName = "prediction"
diff --git a/prediction/v1.3/prediction-gen.go b/prediction/v1.3/prediction-gen.go
index a311dc2..02d6b56 100644
--- a/prediction/v1.3/prediction-gen.go
+++ b/prediction/v1.3/prediction-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "prediction:v1.3"
 const apiName = "prediction"
diff --git a/prediction/v1.4/prediction-gen.go b/prediction/v1.4/prediction-gen.go
index 179282e..76a047c 100644
--- a/prediction/v1.4/prediction-gen.go
+++ b/prediction/v1.4/prediction-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "prediction:v1.4"
 const apiName = "prediction"
diff --git a/prediction/v1.5/prediction-gen.go b/prediction/v1.5/prediction-gen.go
index 1268d33..a476b2f 100644
--- a/prediction/v1.5/prediction-gen.go
+++ b/prediction/v1.5/prediction-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "prediction:v1.5"
 const apiName = "prediction"
diff --git a/prediction/v1.6/prediction-gen.go b/prediction/v1.6/prediction-gen.go
index 9bfb9ac..a3cea39 100644
--- a/prediction/v1.6/prediction-gen.go
+++ b/prediction/v1.6/prediction-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "prediction:v1.6"
 const apiName = "prediction"
diff --git a/pubsub/v1beta1/pubsub-gen.go b/pubsub/v1beta1/pubsub-gen.go
index e352f54..b746b9e 100644
--- a/pubsub/v1beta1/pubsub-gen.go
+++ b/pubsub/v1beta1/pubsub-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "pubsub:v1beta1"
 const apiName = "pubsub"
diff --git a/qpxexpress/v1/qpxexpress-gen.go b/qpxexpress/v1/qpxexpress-gen.go
index 0f211fa..5beb9f2 100644
--- a/qpxexpress/v1/qpxexpress-gen.go
+++ b/qpxexpress/v1/qpxexpress-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "qpxExpress:v1"
 const apiName = "qpxExpress"
diff --git a/replicapool/v1beta1/replicapool-gen.go b/replicapool/v1beta1/replicapool-gen.go
index 0ebb40e..34f35ec 100644
--- a/replicapool/v1beta1/replicapool-gen.go
+++ b/replicapool/v1beta1/replicapool-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "replicapool:v1beta1"
 const apiName = "replicapool"
diff --git a/replicapool/v1beta2/replicapool-gen.go b/replicapool/v1beta2/replicapool-gen.go
index dc8ec88..f12edea 100644
--- a/replicapool/v1beta2/replicapool-gen.go
+++ b/replicapool/v1beta2/replicapool-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "replicapool:v1beta2"
 const apiName = "replicapool"
diff --git a/replicapoolupdater/v1beta1/replicapoolupdater-api.json b/replicapoolupdater/v1beta1/replicapoolupdater-api.json
index d2c54ae..4e8eed8 100644
--- a/replicapoolupdater/v1beta1/replicapoolupdater-api.json
+++ b/replicapoolupdater/v1beta1/replicapoolupdater-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/AT-fnbTxTHlPDlbWlx4KZrEt53M\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/MuNbTn-U7uo4q5kIHWudYUMbnVc\"",
  "discoveryVersion": "v1",
  "id": "replicapoolupdater:v1beta1",
  "name": "replicapoolupdater",
  "version": "v1beta1",
- "revision": "20141227",
+ "revision": "20150114",
  "title": "Google Compute Engine Instance Group Updater API",
  "description": "The Google Compute Engine Instance Group Updater API provides services for updating groups of Compute Engine Instances.",
  "ownerDomain": "google.com",
@@ -143,6 +143,10 @@
    "type": "object",
    "description": "Resource describing a single update (rollout) of a group of instances to the given template.",
    "properties": {
+    "actionType": {
+     "type": "string",
+     "description": "Action to be performed for each instance. Possible values are:  \n- \"RECREATE\": Instance will be recreated."
+    },
     "creationTimestamp": {
      "type": "string",
      "description": "[Output Only] Creation timestamp in RFC3339 text format."
@@ -362,6 +366,11 @@
      "httpMethod": "GET",
      "description": "Lists recent updates for a given managed instance group, in reverse chronological order and paginated format.",
      "parameters": {
+      "filter": {
+       "type": "string",
+       "description": "Optional. Filter expression for filtering listed resources.",
+       "location": "query"
+      },
       "instanceGroupManager": {
        "type": "string",
        "description": "The name of the instance group manager.",
@@ -369,16 +378,16 @@
       },
       "maxResults": {
        "type": "integer",
-       "description": "Maximum count of results to be returned. Acceptable values are 1 to 100, inclusive. (Default: 50)",
-       "default": "50",
-       "format": "int32",
-       "minimum": "1",
-       "maximum": "100",
+       "description": "Optional. Maximum count of results to be returned. Maximum value is 500 and default value is 500.",
+       "default": "500",
+       "format": "uint32",
+       "minimum": "0",
+       "maximum": "500",
        "location": "query"
       },
       "pageToken": {
        "type": "string",
-       "description": "Set this to the nextPageToken value returned by a previous list request to obtain the next page of results from the previous list request.",
+       "description": "Optional. Tag returned by a previous list request truncated by maxResults. Used to continue a previous list request.",
        "location": "query"
       },
       "project": {
@@ -414,18 +423,23 @@
      "httpMethod": "GET",
      "description": "Lists the current status for each instance within a given update.",
      "parameters": {
+      "filter": {
+       "type": "string",
+       "description": "Optional. Filter expression for filtering listed resources.",
+       "location": "query"
+      },
       "maxResults": {
        "type": "integer",
-       "description": "Maximum count of results to be returned. Acceptable values are 1 to 100, inclusive. (Default: 50)",
-       "default": "50",
-       "format": "int32",
-       "minimum": "1",
-       "maximum": "100",
+       "description": "Optional. Maximum count of results to be returned. Maximum value is 500 and default value is 500.",
+       "default": "500",
+       "format": "uint32",
+       "minimum": "0",
+       "maximum": "500",
        "location": "query"
       },
       "pageToken": {
        "type": "string",
-       "description": "Set this to the nextPageToken value returned by a previous list request to obtain the next page of results from the previous list request.",
+       "description": "Optional. Tag returned by a previous list request truncated by maxResults. Used to continue a previous list request.",
        "location": "query"
       },
       "project": {
diff --git a/replicapoolupdater/v1beta1/replicapoolupdater-gen.go b/replicapoolupdater/v1beta1/replicapoolupdater-gen.go
index c0704cf..1bf37c5 100644
--- a/replicapoolupdater/v1beta1/replicapoolupdater-gen.go
+++ b/replicapoolupdater/v1beta1/replicapoolupdater-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "replicapoolupdater:v1beta1"
 const apiName = "replicapoolupdater"
@@ -120,6 +122,11 @@
 }
 
 type RollingUpdate struct {
+	// ActionType: Action to be performed for each instance. Possible values
+	// are:
+	// - "RECREATE": Instance will be recreated.
+	ActionType string `json:"actionType,omitempty"`
+
 	// CreationTimestamp: [Output Only] Creation timestamp in RFC3339 text
 	// format.
 	CreationTimestamp string `json:"creationTimestamp,omitempty"`
@@ -527,6 +534,13 @@
 	return c
 }
 
+// Filter sets the optional parameter "filter": Filter expression for
+// filtering listed resources.
+func (c *RollingUpdatesListCall) Filter(filter string) *RollingUpdatesListCall {
+	c.opt_["filter"] = filter
+	return c
+}
+
 // InstanceGroupManager sets the optional parameter
 // "instanceGroupManager": The name of the instance group manager.
 func (c *RollingUpdatesListCall) InstanceGroupManager(instanceGroupManager string) *RollingUpdatesListCall {
@@ -535,16 +549,16 @@
 }
 
 // MaxResults sets the optional parameter "maxResults": Maximum count of
-// results to be returned. Acceptable values are 1 to 100, inclusive.
-// (Default: 50)
+// results to be returned. Maximum value is 500 and default value is
+// 500.
 func (c *RollingUpdatesListCall) MaxResults(maxResults int64) *RollingUpdatesListCall {
 	c.opt_["maxResults"] = maxResults
 	return c
 }
 
-// PageToken sets the optional parameter "pageToken": Set this to the
-// nextPageToken value returned by a previous list request to obtain the
-// next page of results from the previous list request.
+// PageToken sets the optional parameter "pageToken": Tag returned by a
+// previous list request truncated by maxResults. Used to continue a
+// previous list request.
 func (c *RollingUpdatesListCall) PageToken(pageToken string) *RollingUpdatesListCall {
 	c.opt_["pageToken"] = pageToken
 	return c
@@ -562,6 +576,9 @@
 	var body io.Reader = nil
 	params := make(url.Values)
 	params.Set("alt", "json")
+	if v, ok := c.opt_["filter"]; ok {
+		params.Set("filter", fmt.Sprintf("%v", v))
+	}
 	if v, ok := c.opt_["instanceGroupManager"]; ok {
 		params.Set("instanceGroupManager", fmt.Sprintf("%v", v))
 	}
@@ -604,22 +621,27 @@
 	//     "zone"
 	//   ],
 	//   "parameters": {
+	//     "filter": {
+	//       "description": "Optional. Filter expression for filtering listed resources.",
+	//       "location": "query",
+	//       "type": "string"
+	//     },
 	//     "instanceGroupManager": {
 	//       "description": "The name of the instance group manager.",
 	//       "location": "query",
 	//       "type": "string"
 	//     },
 	//     "maxResults": {
-	//       "default": "50",
-	//       "description": "Maximum count of results to be returned. Acceptable values are 1 to 100, inclusive. (Default: 50)",
-	//       "format": "int32",
+	//       "default": "500",
+	//       "description": "Optional. Maximum count of results to be returned. Maximum value is 500 and default value is 500.",
+	//       "format": "uint32",
 	//       "location": "query",
-	//       "maximum": "100",
-	//       "minimum": "1",
+	//       "maximum": "500",
+	//       "minimum": "0",
 	//       "type": "integer"
 	//     },
 	//     "pageToken": {
-	//       "description": "Set this to the nextPageToken value returned by a previous list request to obtain the next page of results from the previous list request.",
+	//       "description": "Optional. Tag returned by a previous list request truncated by maxResults. Used to continue a previous list request.",
 	//       "location": "query",
 	//       "type": "string"
 	//     },
@@ -671,17 +693,24 @@
 	return c
 }
 
+// Filter sets the optional parameter "filter": Filter expression for
+// filtering listed resources.
+func (c *RollingUpdatesListInstanceUpdatesCall) Filter(filter string) *RollingUpdatesListInstanceUpdatesCall {
+	c.opt_["filter"] = filter
+	return c
+}
+
 // MaxResults sets the optional parameter "maxResults": Maximum count of
-// results to be returned. Acceptable values are 1 to 100, inclusive.
-// (Default: 50)
+// results to be returned. Maximum value is 500 and default value is
+// 500.
 func (c *RollingUpdatesListInstanceUpdatesCall) MaxResults(maxResults int64) *RollingUpdatesListInstanceUpdatesCall {
 	c.opt_["maxResults"] = maxResults
 	return c
 }
 
-// PageToken sets the optional parameter "pageToken": Set this to the
-// nextPageToken value returned by a previous list request to obtain the
-// next page of results from the previous list request.
+// PageToken sets the optional parameter "pageToken": Tag returned by a
+// previous list request truncated by maxResults. Used to continue a
+// previous list request.
 func (c *RollingUpdatesListInstanceUpdatesCall) PageToken(pageToken string) *RollingUpdatesListInstanceUpdatesCall {
 	c.opt_["pageToken"] = pageToken
 	return c
@@ -699,6 +728,9 @@
 	var body io.Reader = nil
 	params := make(url.Values)
 	params.Set("alt", "json")
+	if v, ok := c.opt_["filter"]; ok {
+		params.Set("filter", fmt.Sprintf("%v", v))
+	}
 	if v, ok := c.opt_["maxResults"]; ok {
 		params.Set("maxResults", fmt.Sprintf("%v", v))
 	}
@@ -740,17 +772,22 @@
 	//     "rollingUpdate"
 	//   ],
 	//   "parameters": {
-	//     "maxResults": {
-	//       "default": "50",
-	//       "description": "Maximum count of results to be returned. Acceptable values are 1 to 100, inclusive. (Default: 50)",
-	//       "format": "int32",
+	//     "filter": {
+	//       "description": "Optional. Filter expression for filtering listed resources.",
 	//       "location": "query",
-	//       "maximum": "100",
-	//       "minimum": "1",
+	//       "type": "string"
+	//     },
+	//     "maxResults": {
+	//       "default": "500",
+	//       "description": "Optional. Maximum count of results to be returned. Maximum value is 500 and default value is 500.",
+	//       "format": "uint32",
+	//       "location": "query",
+	//       "maximum": "500",
+	//       "minimum": "0",
 	//       "type": "integer"
 	//     },
 	//     "pageToken": {
-	//       "description": "Set this to the nextPageToken value returned by a previous list request to obtain the next page of results from the previous list request.",
+	//       "description": "Optional. Tag returned by a previous list request truncated by maxResults. Used to continue a previous list request.",
 	//       "location": "query",
 	//       "type": "string"
 	//     },
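
The replicapoolupdater list calls gain a Filter option, and maxResults
becomes a 0-500 uint32 paired with a tag-style pageToken. A paging
sketch; the filter expression, the Items/NextPageToken fields on the
response, and the client setup are assumptions:

	package example

	import (
		"fmt"
		"log"
		"net/http"

		"google.golang.org/api/replicapoolupdater/v1beta1"
	)

	// listUpdates pages through the rolling updates in one zone.
	// authClient is assumed to be an OAuth2-authorized *http.Client.
	func listUpdates(authClient *http.Client) {
		svc, err := replicapoolupdater.New(authClient)
		if err != nil {
			log.Fatal(err)
		}
		pageToken := ""
		for {
			call := svc.RollingUpdates.List("my-project", "us-central1-a").
				Filter("actionType eq RECREATE"). // hypothetical filter expression
				MaxResults(500)
			if pageToken != "" {
				call.PageToken(pageToken)
			}
			resp, err := call.Do()
			if err != nil {
				log.Fatal(err)
			}
			for _, u := range resp.Items {
				fmt.Println(u.Id, u.ActionType)
			}
			if resp.NextPageToken == "" {
				break
			}
			pageToken = resp.NextPageToken
		}
	}
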
diff --git a/reseller/v1/reseller-gen.go b/reseller/v1/reseller-gen.go
index e958365..1eaaed0 100644
--- a/reseller/v1/reseller-gen.go
+++ b/reseller/v1/reseller-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "reseller:v1"
 const apiName = "reseller"
diff --git a/reseller/v1sandbox/reseller-gen.go b/reseller/v1sandbox/reseller-gen.go
index 2651db6..c60a75c 100644
--- a/reseller/v1sandbox/reseller-gen.go
+++ b/reseller/v1sandbox/reseller-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "reseller:v1sandbox"
 const apiName = "reseller"
diff --git a/resourceviews/v1beta1/resourceviews-gen.go b/resourceviews/v1beta1/resourceviews-gen.go
index 9a1aba8..7051f7e 100644
--- a/resourceviews/v1beta1/resourceviews-gen.go
+++ b/resourceviews/v1beta1/resourceviews-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "resourceviews:v1beta1"
 const apiName = "resourceviews"
diff --git a/resourceviews/v1beta2/resourceviews-gen.go b/resourceviews/v1beta2/resourceviews-gen.go
index cdafd3c..35a46a7 100644
--- a/resourceviews/v1beta2/resourceviews-gen.go
+++ b/resourceviews/v1beta2/resourceviews-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "resourceviews:v1beta2"
 const apiName = "resourceviews"
diff --git a/siteverification/v1/siteverification-gen.go b/siteverification/v1/siteverification-gen.go
index 625cb5f..b013361 100644
--- a/siteverification/v1/siteverification-gen.go
+++ b/siteverification/v1/siteverification-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "siteVerification:v1"
 const apiName = "siteVerification"
diff --git a/spectrum/v1explorer/spectrum-gen.go b/spectrum/v1explorer/spectrum-gen.go
index 21e2862..09987e0 100644
--- a/spectrum/v1explorer/spectrum-gen.go
+++ b/spectrum/v1explorer/spectrum-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "spectrum:v1explorer"
 const apiName = "spectrum"
diff --git a/sqladmin/v1beta1/sqladmin-gen.go b/sqladmin/v1beta1/sqladmin-gen.go
index c03a26e..7be3072 100644
--- a/sqladmin/v1beta1/sqladmin-gen.go
+++ b/sqladmin/v1beta1/sqladmin-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "sqladmin:v1beta1"
 const apiName = "sqladmin"
diff --git a/sqladmin/v1beta3/sqladmin-gen.go b/sqladmin/v1beta3/sqladmin-gen.go
index c35c9ea..e742804 100644
--- a/sqladmin/v1beta3/sqladmin-gen.go
+++ b/sqladmin/v1beta3/sqladmin-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "sqladmin:v1beta3"
 const apiName = "sqladmin"
diff --git a/storage/v1/storage-api.json b/storage/v1/storage-api.json
index d136da8..e5b64dc 100644
--- a/storage/v1/storage-api.json
+++ b/storage/v1/storage-api.json
@@ -1,6 +1,6 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/ojqzpkOHgCznIuY1zli7zQC6mTc\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/5Bhk48G739sjHde-Z8-H45qQoc8\"",
  "discoveryVersion": "v1",
  "id": "storage:v1",
  "name": "storage",
@@ -1143,6 +1143,11 @@
        "description": "A previously-returned page token representing part of the larger set of results to view.",
        "location": "query"
       },
+      "prefix": {
+       "type": "string",
+       "description": "Filter results to buckets whose names begin with this prefix.",
+       "location": "query"
+      },
       "project": {
        "type": "string",
        "description": "A valid API project identifier.",
diff --git a/storage/v1/storage-gen.go b/storage/v1/storage-gen.go
index 06289fb..9835801 100644
--- a/storage/v1/storage-gen.go
+++ b/storage/v1/storage-gen.go
@@ -1625,6 +1625,13 @@
 	return c
 }
 
+// Prefix sets the optional parameter "prefix": Filter results to
+// buckets whose names begin with this prefix.
+func (c *BucketsListCall) Prefix(prefix string) *BucketsListCall {
+	c.opt_["prefix"] = prefix
+	return c
+}
+
 // Projection sets the optional parameter "projection": Set of
 // properties to return. Defaults to noAcl.
 func (c *BucketsListCall) Projection(projection string) *BucketsListCall {
@@ -1651,6 +1658,9 @@
 	if v, ok := c.opt_["pageToken"]; ok {
 		params.Set("pageToken", fmt.Sprintf("%v", v))
 	}
+	if v, ok := c.opt_["prefix"]; ok {
+		params.Set("prefix", fmt.Sprintf("%v", v))
+	}
 	if v, ok := c.opt_["projection"]; ok {
 		params.Set("projection", fmt.Sprintf("%v", v))
 	}
@@ -1695,6 +1705,11 @@
 	//       "location": "query",
 	//       "type": "string"
 	//     },
+	//     "prefix": {
+	//       "description": "Filter results to buckets whose names begin with this prefix.",
+	//       "location": "query",
+	//       "type": "string"
+	//     },
 	//     "project": {
 	//       "description": "A valid API project identifier.",
 	//       "location": "query",
@@ -4508,10 +4523,13 @@
 		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	var contentLength_ int64
 	var hasMedia_ bool
 	if c.protocol_ != "resumable" {
-		contentLength_, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
 	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
@@ -4528,7 +4546,6 @@
 			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
 		}
 	} else if hasMedia_ {
-		req.ContentLength = contentLength_
 		req.Header.Set("Content-Type", ctype)
 	}
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
diff --git a/storage/v1beta1/storage-api.json b/storage/v1beta1/storage-api.json
index 0c6ba34..b0319e3 100644
--- a/storage/v1beta1/storage-api.json
+++ b/storage/v1beta1/storage-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/_5KTX4vFea9QcHtp4C7YZeROKZU\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/--kbHbD1ylfgYf5jjY5PzcBb6t4\"",
  "discoveryVersion": "v1",
  "id": "storage:v1beta1",
  "name": "storage",
  "version": "v1beta1",
- "revision": "20141112",
+ "revision": "20141229",
  "title": "Cloud Storage API",
  "description": "Lets you store and retrieve potentially-large, immutable data objects.",
  "ownerDomain": "google.com",
diff --git a/storage/v1beta1/storage-gen.go b/storage/v1beta1/storage-gen.go
index 283dc4f..78d2975 100644
--- a/storage/v1beta1/storage-gen.go
+++ b/storage/v1beta1/storage-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "storage:v1beta1"
 const apiName = "storage"
@@ -2325,11 +2327,15 @@
 // method id "storage.objects.insert":
 
 type ObjectsInsertCall struct {
-	s      *Service
-	bucket string
-	object *Object
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	bucket     string
+	object     *Object
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Stores new data blobs and associated metadata.
@@ -2355,8 +2361,32 @@
 	c.opt_["projection"] = projection
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ObjectsInsertCall) Media(r io.Reader) *ObjectsInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ObjectsInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ObjectsInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ObjectsInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ObjectsInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -2387,20 +2417,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "b/{bucket}/o")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"bucket": c.bucket,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -2410,6 +2462,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Object
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
diff --git a/storage/v1beta2/storage-api.json b/storage/v1beta2/storage-api.json
index 11a6dba..b552bed 100644
--- a/storage/v1beta2/storage-api.json
+++ b/storage/v1beta2/storage-api.json
@@ -1,11 +1,11 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/4UvMVcTBgZntsuI4--MM9y0RNiQ\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/15LYn2He35ZdsKeqcXSKVIUDAPY\"",
  "discoveryVersion": "v1",
  "id": "storage:v1beta2",
  "name": "storage",
  "version": "v1beta2",
- "revision": "20141112",
+ "revision": "20141229",
  "title": "Cloud Storage API",
  "description": "Lets you store and retrieve potentially-large, immutable data objects.",
  "ownerDomain": "google.com",
diff --git a/storage/v1beta2/storage-gen.go b/storage/v1beta2/storage-gen.go
index cd753c2..9e85dd1 100644
--- a/storage/v1beta2/storage-gen.go
+++ b/storage/v1beta2/storage-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "storage:v1beta2"
 const apiName = "storage"
@@ -4045,11 +4047,15 @@
 // method id "storage.objects.insert":
 
 type ObjectsInsertCall struct {
-	s      *Service
-	bucket string
-	object *Object
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	bucket     string
+	object     *Object
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Stores new data blobs and associated metadata.
@@ -4108,8 +4114,32 @@
 	c.opt_["projection"] = projection
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ObjectsInsertCall) Media(r io.Reader) *ObjectsInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ObjectsInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ObjectsInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ObjectsInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ObjectsInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -4152,20 +4182,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "b/{bucket}/o")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.Expand(req.URL, map[string]string{
 		"bucket": c.bucket,
 	})
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -4175,6 +4227,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Object
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
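
The resumable path takes an io.ReaderAt plus its size, is cancellable
through the context, and reports progress after every chunk; note that
the generated code rejects a resumable upload unless the name parameter
is set. A sketch against storage/v1beta2, where the bucket, object name,
MIME type, the Name option, and the client setup are assumptions:

	package example

	import (
		"log"
		"net/http"
		"os"

		"golang.org/x/net/context"
		"google.golang.org/api/storage/v1beta2"
	)

	// uploadResumable streams a large file to Cloud Storage in chunks.
	// authClient is assumed to be an OAuth2-authorized *http.Client.
	func uploadResumable(authClient *http.Client) {
		svc, err := storage.New(authClient)
		if err != nil {
			log.Fatal(err)
		}
		f, err := os.Open("backup.tar.gz")
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()
		fi, err := f.Stat()
		if err != nil {
			log.Fatal(err)
		}
		obj, err := svc.Objects.Insert("my-bucket", &storage.Object{}).
			Name("backup.tar.gz"). // resumable uploads require the name parameter
			ResumableMedia(context.Background(), f, fi.Size(), "application/gzip").
			ProgressUpdater(func(current, total int64) {
				log.Printf("uploaded %d of %d bytes", current, total)
			}).
			Do()
		if err != nil {
			log.Fatal(err)
		}
		log.Println("done:", obj.Name)
	}
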
diff --git a/tagmanager/v1/tagmanager-api.json b/tagmanager/v1/tagmanager-api.json
index b41ce13..7c02996 100644
--- a/tagmanager/v1/tagmanager-api.json
+++ b/tagmanager/v1/tagmanager-api.json
@@ -1,12 +1,12 @@
 {
  "kind": "discovery#restDescription",
- "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/xcGgQKwhk2lzAuNnzm2cQsk-PZM\"",
+ "etag": "\"ye6orv2F-1npMW3u9suM3a7C5Bo/Ob5VAt9jPBqja2_DA3xCwh8JLjs\"",
  "discoveryVersion": "v1",
  "id": "tagmanager:v1",
  "name": "tagmanager",
  "canonicalName": "Tag Manager",
  "version": "v1",
- "revision": "20141112",
+ "revision": "20150121",
  "title": "Tag Manager API",
  "description": "API for accessing Tag Manager accounts and containers.",
  "ownerDomain": "google.com",
@@ -229,7 +229,7 @@
     },
     "enabledBuiltInVariable": {
      "type": "array",
-     "description": "List of enabled built-in variables.",
+     "description": "List of enabled built-in variables. Valid values include: pageUrl, pageHostname, pagePath, referrer, event, clickElement, clickClasses, clickId, clickTarget, clickUrl, clickText, formElement, formClasses, formId, formTarget, formUrl, formText, errorMessage, errorUrl, errorLine, newHistoryFragment, oldHistoryFragment, newHistoryState, oldHistoryState, historySource, containerVersion, debugMode, randomNumber, containerId.",
      "items": {
       "type": "string",
       "enum": [
@@ -1101,7 +1101,7 @@
     },
     "uniqueTriggerId": {
      "$ref": "Parameter",
-     "description": "Globally unique id of the trigger that auto-generates this Form Submit or Link Click listeners if any. Used to make incompatible auto-events work together with trigger filtering based on trigger ids. This value is populated during output generation since the tags implied by triggers don't exist until then. Only valid for Form Submission and Link Click triggers."
+     "description": "Globally unique id of the trigger that auto-generates this (a Form Submit, Link Click or Timer listener) if any. Used to make incompatible auto-events work together with trigger filtering based on trigger ids. This value is populated during output generation since the tags implied by triggers don't exist until then. Only valid for Form Submit, Link Click and Timer triggers."
     },
     "videoPercentageList": {
      "$ref": "Parameter",
diff --git a/tagmanager/v1/tagmanager-gen.go b/tagmanager/v1/tagmanager-gen.go
index 830ae52..396f5e4 100644
--- a/tagmanager/v1/tagmanager-gen.go
+++ b/tagmanager/v1/tagmanager-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "tagmanager:v1"
 const apiName = "tagmanager"
@@ -238,7 +240,13 @@
 	// Container.
 	DomainName []string `json:"domainName,omitempty"`
 
-	// EnabledBuiltInVariable: List of enabled built-in variables.
+	// EnabledBuiltInVariable: List of enabled built-in variables. Valid
+	// values include: pageUrl, pageHostname, pagePath, referrer, event,
+	// clickElement, clickClasses, clickId, clickTarget, clickUrl,
+	// clickText, formElement, formClasses, formId, formTarget, formUrl,
+	// formText, errorMessage, errorUrl, errorLine, newHistoryFragment,
+	// oldHistoryFragment, newHistoryState, oldHistoryState, historySource,
+	// containerVersion, debugMode, randomNumber, containerId.
 	EnabledBuiltInVariable []string `json:"enabledBuiltInVariable,omitempty"`
 
 	// Fingerprint: The fingerprint of the GTM Container as computed at
@@ -654,11 +662,11 @@
 	Type string `json:"type,omitempty"`
 
 	// UniqueTriggerId: Globally unique id of the trigger that
-	// auto-generates this Form Submit or Link Click listeners if any. Used
-	// to make incompatible auto-events work together with trigger filtering
-	// based on trigger ids. This value is populated during output
+	// auto-generates this (a Form Submit, Link Click or Timer listener) if
+	// any. Used to make incompatible auto-events work together with trigger
+	// filtering based on trigger ids. This value is populated during output
 	// generation since the tags implied by triggers don't exist until then.
-	// Only valid for Form Submission and Link Click triggers.
+	// Only valid for Form Submit, Link Click and Timer triggers.
 	UniqueTriggerId *Parameter `json:"uniqueTriggerId,omitempty"`
 
 	// VideoPercentageList: List of integer percentage values. The trigger
diff --git a/taskqueue/v1beta1/taskqueue-gen.go b/taskqueue/v1beta1/taskqueue-gen.go
index fac51a4..bed465d 100644
--- a/taskqueue/v1beta1/taskqueue-gen.go
+++ b/taskqueue/v1beta1/taskqueue-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "taskqueue:v1beta1"
 const apiName = "taskqueue"
diff --git a/taskqueue/v1beta2/taskqueue-gen.go b/taskqueue/v1beta2/taskqueue-gen.go
index d2045be..7d780ab 100644
--- a/taskqueue/v1beta2/taskqueue-gen.go
+++ b/taskqueue/v1beta2/taskqueue-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "taskqueue:v1beta2"
 const apiName = "taskqueue"
diff --git a/tasks/v1/tasks-gen.go b/tasks/v1/tasks-gen.go
index e009d0c..6fc7283 100644
--- a/tasks/v1/tasks-gen.go
+++ b/tasks/v1/tasks-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "tasks:v1"
 const apiName = "tasks"
diff --git a/translate/v2/translate-gen.go b/translate/v2/translate-gen.go
index d84a8e5..fa59c86 100644
--- a/translate/v2/translate-gen.go
+++ b/translate/v2/translate-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "translate:v2"
 const apiName = "translate"
diff --git a/urlshortener/v1/urlshortener-gen.go b/urlshortener/v1/urlshortener-gen.go
index 5b3b878..600aca7 100644
--- a/urlshortener/v1/urlshortener-gen.go
+++ b/urlshortener/v1/urlshortener-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "urlshortener:v1"
 const apiName = "urlshortener"
diff --git a/webfonts/v1/webfonts-gen.go b/webfonts/v1/webfonts-gen.go
index f1d91fe..305e0e4 100644
--- a/webfonts/v1/webfonts-gen.go
+++ b/webfonts/v1/webfonts-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "webfonts:v1"
 const apiName = "webfonts"
diff --git a/webmasters/v3/webmasters-gen.go b/webmasters/v3/webmasters-gen.go
index 7855af5..3266c53 100644
--- a/webmasters/v3/webmasters-gen.go
+++ b/webmasters/v3/webmasters-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "webmasters:v3"
 const apiName = "webmasters"
diff --git a/youtube/v3/youtube-gen.go b/youtube/v3/youtube-gen.go
index d3b7c28..af3efdf 100644
--- a/youtube/v3/youtube-gen.go
+++ b/youtube/v3/youtube-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "youtube:v3"
 const apiName = "youtube"
@@ -3439,6 +3441,10 @@
 	channelbannerresource *ChannelBannerResource
 	opt_                  map[string]interface{}
 	media_                io.Reader
+	resumable_            googleapi.SizeReaderAt
+	mediaType_            string
+	ctx_                  context.Context
+	protocol_             string
 }
 
 // Insert: Uploads a channel banner image to YouTube. This method
@@ -3478,8 +3484,32 @@
 	c.opt_["onBehalfOfContentOwner"] = onBehalfOfContentOwner
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ChannelBannersInsertCall) Media(r io.Reader) *ChannelBannersInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ChannelBannersInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ChannelBannersInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ChannelBannersInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ChannelBannersInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -3507,18 +3537,40 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "channelBanners/insert")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.SetOpaque(req.URL)
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -3528,6 +3580,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *ChannelBannerResource
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -8817,10 +8884,14 @@
 // method id "youtube.thumbnails.set":
 
 type ThumbnailsSetCall struct {
-	s       *Service
-	videoId string
-	opt_    map[string]interface{}
-	media_  io.Reader
+	s          *Service
+	videoId    string
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Set: Uploads a custom video thumbnail to YouTube and sets it for a
@@ -8845,8 +8916,32 @@
 	c.opt_["onBehalfOfContentOwner"] = onBehalfOfContentOwner
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *ThumbnailsSetCall) Media(r io.Reader) *ThumbnailsSetCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *ThumbnailsSetCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *ThumbnailsSetCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function in order to not slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *ThumbnailsSetCall) ProgressUpdater(pu googleapi.ProgressUpdater) *ThumbnailsSetCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -8870,20 +8965,42 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "thumbnails/set")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
 	body = new(bytes.Buffer)
 	ctype := "application/json"
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.SetOpaque(req.URL)
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -8893,6 +9010,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *ThumbnailSetResponse
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -9280,11 +9412,15 @@
 // method id "youtube.videos.insert":
 
 type VideosInsertCall struct {
-	s      *Service
-	part   string
-	video  *Video
-	opt_   map[string]interface{}
-	media_ io.Reader
+	s          *Service
+	part       string
+	video      *Video
+	opt_       map[string]interface{}
+	media_     io.Reader
+	resumable_ googleapi.SizeReaderAt
+	mediaType_ string
+	ctx_       context.Context
+	protocol_  string
 }
 
 // Insert: Uploads a video to YouTube and optionally sets the video's
@@ -9366,8 +9502,32 @@
 	c.opt_["stabilize"] = stabilize
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *VideosInsertCall) Media(r io.Reader) *VideosInsertCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks and can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *VideosInsertCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *VideosInsertCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function so as not to slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *VideosInsertCall) ProgressUpdater(pu googleapi.ProgressUpdater) *VideosInsertCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
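
Taken together, Media, ResumableMedia, and ProgressUpdater give callers two upload paths. A hypothetical caller-side sketch follows; the import path, the already-authorized httpClient, and the Video/VideoSnippet field names are assumptions about this generated package, and error handling is abbreviated:

	package sketch

	import (
		"log"
		"net/http"
		"os"

		"google.golang.org/api/youtube/v3"
	)

	// uploadVideo is a hypothetical caller of the methods added above;
	// httpClient is assumed to be an OAuth2-authorized *http.Client.
	func uploadVideo(httpClient *http.Client) {
		f, err := os.Open("talk.mp4")
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()

		svc, err := youtube.New(httpClient)
		if err != nil {
			log.Fatal(err)
		}

		video := &youtube.Video{
			Snippet: &youtube.VideoSnippet{Title: "Test upload"},
		}
		// Single-chunk path: the media is sent in one multipart request.
		res, err := svc.Videos.Insert("snippet", video).Media(f).Do()
		if err != nil {
			log.Fatal(err)
		}
		log.Println("uploaded video id:", res.Id)
	}

The resumable variant would instead chain ResumableMedia(ctx, f, size, "video/mp4"), and optionally ProgressUpdater with a progress callback, before Do. Note, though, that the resumable branch of the generated Do bodies also insists on a "name" query parameter (the params.Get("name") check), which suggests that path is only usable on methods exposing such a parameter.
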
 
@@ -9408,18 +9568,40 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "videos")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.SetOpaque(req.URL)
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return nil, fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -9429,6 +9611,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return nil, err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return nil, err
+		}
+	}
 	var ret *Video
 	if err := json.NewDecoder(res.Body).Decode(&ret); err != nil {
 		return nil, err
@@ -10011,6 +10208,10 @@
 	invideobranding *InvideoBranding
 	opt_            map[string]interface{}
 	media_          io.Reader
+	resumable_      googleapi.SizeReaderAt
+	mediaType_      string
+	ctx_            context.Context
+	protocol_       string
 }
 
 // Set: Uploads a watermark image to YouTube and sets it for a channel.
@@ -10035,8 +10236,32 @@
 	c.opt_["onBehalfOfContentOwner"] = onBehalfOfContentOwner
 	return c
 }
+
+// Media specifies the media to upload in a single chunk.
+// At most one of Media and ResumableMedia may be set.
 func (c *WatermarksSetCall) Media(r io.Reader) *WatermarksSetCall {
 	c.media_ = r
+	c.protocol_ = "multipart"
+	return c
+}
+
+// ResumableMedia specifies the media to upload in chunks; the upload can be cancelled with ctx.
+// At most one of Media and ResumableMedia may be set.
+// mediaType identifies the MIME media type of the upload, such as "image/png".
+// If mediaType is "", it will be auto-detected.
+func (c *WatermarksSetCall) ResumableMedia(ctx context.Context, r io.ReaderAt, size int64, mediaType string) *WatermarksSetCall {
+	c.ctx_ = ctx
+	c.resumable_ = io.NewSectionReader(r, 0, size)
+	c.mediaType_ = mediaType
+	c.protocol_ = "resumable"
+	return c
+}
+
+// ProgressUpdater provides a callback function that will be called after every chunk.
+// It should be a low-latency function so as not to slow down the upload operation.
+// This should only be called when using ResumableMedia (as opposed to Media).
+func (c *WatermarksSetCall) ProgressUpdater(pu googleapi.ProgressUpdater) *WatermarksSetCall {
+	c.opt_["progressUpdater"] = pu
 	return c
 }
 
@@ -10065,18 +10290,40 @@
 		params.Set("fields", fmt.Sprintf("%v", v))
 	}
 	urls := googleapi.ResolveRelative(c.s.BasePath, "watermarks/set")
-	if c.media_ != nil {
+	var progressUpdater_ googleapi.ProgressUpdater
+	if v, ok := c.opt_["progressUpdater"]; ok {
+		if pu, ok := v.(googleapi.ProgressUpdater); ok {
+			progressUpdater_ = pu
+		}
+	}
+	if c.media_ != nil || c.resumable_ != nil {
 		urls = strings.Replace(urls, "https://www.googleapis.com/", "https://www.googleapis.com/upload/", 1)
-		params.Set("uploadType", "multipart")
+		params.Set("uploadType", c.protocol_)
 	}
 	urls += "?" + params.Encode()
-	contentLength_, hasMedia_ := googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+	var hasMedia_ bool
+	if c.protocol_ != "resumable" {
+		var cancel func()
+		cancel, hasMedia_ = googleapi.ConditionallyIncludeMedia(c.media_, &body, &ctype)
+		if cancel != nil {
+			defer cancel()
+		}
+	}
 	req, _ := http.NewRequest("POST", urls, body)
 	googleapi.SetOpaque(req.URL)
-	if hasMedia_ {
-		req.ContentLength = contentLength_
+	if c.protocol_ == "resumable" {
+		req.ContentLength = 0
+		if c.mediaType_ == "" {
+			c.mediaType_ = googleapi.DetectMediaType(c.resumable_)
+		}
+		req.Header.Set("X-Upload-Content-Type", c.mediaType_)
+		req.Body = nil
+		if params.Get("name") == "" {
+			return fmt.Errorf("resumable uploads must set the Name parameter.")
+		}
+	} else if hasMedia_ {
+		req.Header.Set("Content-Type", ctype)
 	}
-	req.Header.Set("Content-Type", ctype)
 	req.Header.Set("User-Agent", "google-api-go-client/0.5")
 	res, err := c.s.client.Do(req)
 	if err != nil {
@@ -10086,6 +10333,21 @@
 	if err := googleapi.CheckResponse(res); err != nil {
 		return err
 	}
+	if c.protocol_ == "resumable" {
+		loc := res.Header.Get("Location")
+		rx := &googleapi.ResumableUpload{
+			Client:        c.s.client,
+			URI:           loc,
+			Media:         c.resumable_,
+			MediaType:     c.mediaType_,
+			ContentLength: c.resumable_.Size(),
+			Callback:      progressUpdater_,
+		}
+		res, err = rx.Upload(c.ctx_)
+		if err != nil {
+			return err
+		}
+	}
 	return nil
 	// {
 	//   "description": "Uploads a watermark image to YouTube and sets it for a channel.",
diff --git a/youtubeanalytics/v1/youtubeanalytics-gen.go b/youtubeanalytics/v1/youtubeanalytics-gen.go
index a4f3c4a..1500994 100644
--- a/youtubeanalytics/v1/youtubeanalytics-gen.go
+++ b/youtubeanalytics/v1/youtubeanalytics-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "youtubeAnalytics:v1"
 const apiName = "youtubeAnalytics"
diff --git a/youtubeanalytics/v1beta1/youtubeanalytics-gen.go b/youtubeanalytics/v1beta1/youtubeanalytics-gen.go
index 833d84c..98567ae 100644
--- a/youtubeanalytics/v1beta1/youtubeanalytics-gen.go
+++ b/youtubeanalytics/v1beta1/youtubeanalytics-gen.go
@@ -14,6 +14,7 @@
 	"encoding/json"
 	"errors"
 	"fmt"
+	"golang.org/x/net/context"
 	"google.golang.org/api/googleapi"
 	"io"
 	"net/http"
@@ -33,6 +34,7 @@
 var _ = googleapi.Version
 var _ = errors.New
 var _ = strings.Replace
+var _ = context.Background
 
 const apiId = "youtubeAnalytics:v1beta1"
 const apiName = "youtubeAnalytics"
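
These trailing hunks only add the golang.org/x/net/context import together with a package-level var _ = context.Background. Presumably the blank-identifier assignment is there so the new import compiles even in generated packages whose methods never reference context; a minimal illustration of the pattern:

	package example

	// Importing a package that nothing else references would normally fail
	// to compile ("imported and not used"); assigning one of its identifiers
	// to the blank identifier keeps the import legal.
	import "golang.org/x/net/context"

	var _ = context.Background
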