examples: remove examples for Google Cloud APIs

The examples will still be available in git; a link and the commit hash
are provided in the new examples/README.md. Most people looking for
example/getting-started code will want to get started with the
cloud.google.com/go packages instead.
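
For anyone migrating, here is a minimal sketch of the same kind of
upload that the deleted examples/storage.go performed, written against
the cloud.google.com/go/storage package (the bucket and object names
below are placeholders, not part of this change):

    package main

    import (
    	"context"
    	"fmt"
    	"log"

    	"cloud.google.com/go/storage"
    )

    func main() {
    	ctx := context.Background()

    	// NewClient picks up Application Default Credentials, so no
    	// manual OAuth2 / *http.Client wiring is needed.
    	client, err := storage.NewClient(ctx)
    	if err != nil {
    		log.Fatalf("storage.NewClient: %v", err)
    	}
    	defer client.Close()

    	// Write a small object; "my-bucket" and "hello.txt" are
    	// placeholder names.
    	w := client.Bucket("my-bucket").Object("hello.txt").NewWriter(ctx)
    	if _, err := fmt.Fprintln(w, "hello, world"); err != nil {
    		log.Fatalf("writing object: %v", err)
    	}
    	if err := w.Close(); err != nil {
    		log.Fatalf("Writer.Close: %v", err)
    	}
    }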

Fixes #192

Change-Id: I49efbbb94a9cb8eaf5e6f219b19d20d16403d362
Reviewed-on: https://code-review.googlesource.com/29510
Reviewed-by: Jonathan Amsterdam <jba@google.com>
diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 0000000..1930f79
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,15 @@
+# Check out the new, idiomatic GCP library
+
+https://github.com/GoogleCloudPlatform/google-cloud-go
+
+https://godoc.org/cloud.google.com/go
+
+## Each package contains its own examples
+
+https://godoc.org/cloud.google.com/go/bigquery#pkg-examples
+
+https://godoc.org/cloud.google.com/go/pubsub#pkg-examples
+
+https://godoc.org/cloud.google.com/go/storage#pkg-examples
+
+If you are still looking for the Google Cloud examples from this library, check out [3639d6d](https://code.googlesource.com/google-api-go-client/+/3639d6d93f377f39a1de765fa4ef37b3c7ca8bd9/examples/).
diff --git a/examples/bigquery.go b/examples/bigquery.go
deleted file mode 100644
index a707b5b..0000000
--- a/examples/bigquery.go
+++ /dev/null
@@ -1,368 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package main
-
-import (
-	"container/list"
-	"encoding/json"
-	"fmt"
-	"io/ioutil"
-	"log"
-	"math"
-	"math/rand"
-	"net/http"
-	"os"
-	"strconv"
-	"strings"
-	"time"
-
-	bigquery "google.golang.org/api/bigquery/v2"
-	storage "google.golang.org/api/storage/v1"
-)
-
-const (
-	GB                         = 1 << 30
-	MaxBackoff                 = 30000
-	BaseBackoff                = 250
-	BackoffGrowthFactor        = 1.8
-	BackoffGrowthDamper        = 0.25
-	JobStatusDone              = "DONE"
-	DatasetAlreadyExists       = "Already Exists: Dataset"
-	TableWriteEmptyDisposition = "WRITE_EMPTY"
-)
-
-func init() {
-	scope := fmt.Sprintf("%s %s %s", bigquery.BigqueryScope,
-		storage.DevstorageReadOnlyScope,
-		"https://www.googleapis.com/auth/userinfo.profile")
-	registerDemo("bigquery", scope, bqMain)
-}
-
-// This example demonstrates loading objects from Google Cloud Storage into
-// BigQuery. Objects are specified by their bucket and a name prefix. Each
-// object will be loaded into a new table identified by the object name minus
-// any file extension. All tables are added to the specified dataset (one will
-// be created if necessary). Currently, tables will not be overwritten and an
-// attempt to load an object into a dataset that already contains its table
-// will emit an error message indicating the table already exists.
-// A schema file must be provided and it will be applied to every object/table.
-// Example usage:
-//   go-api-demo -clientid="my-clientid" -secret="my-secret" bq myProject
-//								myDataBucket datafile2013070 DataFiles2013
-//								./datafile_schema.json 100
-//
-// This will load all objects (e.g. all data files from July 2013) from
-// gs://myDataBucket into a (possibly new) BigQuery dataset named DataFiles2013
-// using the schema file provided and allowing up to 100 bad records. Assuming
-// each object is named like datafileYYYYMMDD.csv.gz and all of July's files are
-// stored in the bucket, 9 tables will be created named like datafile201307DD
-// where DD ranges from 01 to 09, inclusive.
-// When the program completes, it will emit a results line similar to:
-//
-// 9 files loaded in 3m58s (18m2.708s). Size: 7.18GB Rows: 7130725
-//
-// The total elapsed time from the start of the first job to the end of the
-// last job (effectively wall clock time) is shown. In parentheses is the
-// aggregate time taken to load all tables.
-func bqMain(client *http.Client, argv []string) {
-	if len(argv) != 6 {
-		fmt.Fprintln(os.Stderr,
-			"Usage: bq project_id bucket prefix dataset schema max_bad_records")
-		return
-	}
-
-	var (
-		project    = argv[0]
-		bucket     = argv[1]
-		objPrefix  = argv[2]
-		datasetId  = argv[3]
-		schemaFile = argv[4]
-	)
-	badRecords, err := strconv.ParseInt(argv[5], 10, 64)
-	if err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		return
-	}
-
-	rand.Seed(time.Now().UnixNano())
-
-	service, err := storage.New(client)
-	if err != nil {
-		log.Fatalf("Unable to create Storage service: %v", err)
-	}
-
-	// Get the list of objects in the bucket matching the specified prefix.
-	list := service.Objects.List(bucket)
-	list.Prefix(objPrefix)
-	objects, err := list.Do()
-	if err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		return
-	}
-
-	// Create the wrapper and insert the (new) dataset.
-	dataset, err := newBQDataset(client, project, datasetId)
-	if err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		return
-	}
-	if err = dataset.insert(true); err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		return
-	}
-
-	objectSource := &tableSource{
-		maxBadRecords: badRecords,
-		disposition:   TableWriteEmptyDisposition,
-	}
-
-	// Load the schema from disk.
-	f, err := ioutil.ReadFile(schemaFile)
-	if err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		return
-	}
-	if err = json.Unmarshal(f, &objectSource.schema); err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		return
-	}
-
-	// Assumes all objects have .csv, .csv.gz (or no) extension.
-	tableIdFromObject := func(name string) string {
-		return strings.TrimSuffix(strings.TrimSuffix(name, ".gz"), ".csv")
-	}
-
-	// A jobset is a way to group a collection of jobs together for monitoring.
-	// For this example, we just use the name of the bucket and object prefix.
-	jobset := fmt.Sprintf("%s:%s", bucket, objPrefix)
-	fmt.Fprintf(os.Stderr, "\nLoading %d objects.\n", len(objects.Items))
-
-	// Load each object into a dataset of the same name (minus any extension).
-	// A successful insert call will inject the job into our queue for monitoring.
-	for _, o := range objects.Items {
-		objectSource.id = tableIdFromObject(o.Name)
-		objectSource.uri = fmt.Sprintf("gs://%s/%s", o.Bucket, o.Name)
-		if err = dataset.load(jobset, objectSource); err != nil {
-			fmt.Fprintln(os.Stderr, err)
-		}
-	}
-
-	dataset.monitor(jobset)
-}
-
-// Wraps the BigQuery service and dataset and provides some helper functions.
-type bqDataset struct {
-	project string
-	id      string
-	bq      *bigquery.Service
-	dataset *bigquery.Dataset
-	jobsets map[string]*list.List
-}
-
-func newBQDataset(client *http.Client, dsProj string, dsId string) (*bqDataset,
-	error) {
-
-	service, err := bigquery.New(client)
-	if err != nil {
-		log.Fatalf("Unable to create BigQuery service: %v", err)
-	}
-
-	return &bqDataset{
-		project: dsProj,
-		id:      dsId,
-		bq:      service,
-		dataset: &bigquery.Dataset{
-			DatasetReference: &bigquery.DatasetReference{
-				DatasetId: dsId,
-				ProjectId: dsProj,
-			},
-		},
-		jobsets: make(map[string]*list.List),
-	}, nil
-}
-
-func (ds *bqDataset) insert(existsOK bool) error {
-	call := ds.bq.Datasets.Insert(ds.project, ds.dataset)
-	_, err := call.Do()
-	if err != nil && (!existsOK || !strings.Contains(err.Error(),
-		DatasetAlreadyExists)) {
-		return err
-	}
-
-	return nil
-}
-
-type tableSource struct {
-	id            string
-	uri           string
-	schema        bigquery.TableSchema
-	maxBadRecords int64
-	disposition   string
-}
-
-func (ds *bqDataset) load(jobset string, source *tableSource) error {
-	job := &bigquery.Job{
-		Configuration: &bigquery.JobConfiguration{
-			Load: &bigquery.JobConfigurationLoad{
-				DestinationTable: &bigquery.TableReference{
-					DatasetId: ds.dataset.DatasetReference.DatasetId,
-					ProjectId: ds.project,
-					TableId:   source.id,
-				},
-				MaxBadRecords:    source.maxBadRecords,
-				Schema:           &source.schema,
-				SourceUris:       []string{source.uri},
-				WriteDisposition: source.disposition,
-			},
-		},
-	}
-
-	call := ds.bq.Jobs.Insert(ds.project, job)
-	job, err := call.Do()
-	if err != nil {
-		return err
-	}
-
-	_, ok := ds.jobsets[jobset]
-	if !ok {
-		ds.jobsets[jobset] = list.New()
-	}
-	ds.jobsets[jobset].PushBack(job)
-
-	return nil
-}
-
-func (ds *bqDataset) getJob(id string) (*bigquery.Job, error) {
-	return ds.bq.Jobs.Get(ds.project, id).Do()
-}
-
-func (ds *bqDataset) monitor(jobset string) {
-	jobq, ok := ds.jobsets[jobset]
-	if !ok {
-		return
-	}
-
-	var backoff float64 = BaseBackoff
-	pause := func(grow bool) {
-		if grow {
-			backoff *= BackoffGrowthFactor
-			backoff -= (backoff * rand.Float64() * BackoffGrowthDamper)
-			backoff = math.Min(backoff, MaxBackoff)
-			fmt.Fprintf(os.Stderr, "[%s] Checking remaining %d jobs...\n", jobset,
-				1+jobq.Len())
-		}
-		time.Sleep(time.Duration(backoff) * time.Millisecond)
-	}
-	var stats jobStats
-
-	// Track a 'head' pending job in queue for detecting cycling.
-	head := ""
-	// Loop until all jobs are done - with either success or error.
-	for jobq.Len() > 0 {
-		jel := jobq.Front()
-		job := jel.Value.(*bigquery.Job)
-		jobq.Remove(jel)
-		jid := job.JobReference.JobId
-		loop := false
-
-		// Check and possibly pick a new head job id.
-		if len(head) == 0 {
-			head = jid
-		} else {
-			if jid == head {
-				loop = true
-			}
-		}
-
-		// Retrieve the job's current status.
-		pause(loop)
-		j, err := ds.getJob(jid)
-		if err != nil {
-			fmt.Fprintln(os.Stderr, err)
-			// In the case of a transient API error, we want to keep the job.
-			if j == nil {
-				jobq.PushBack(job)
-			} else {
-				// Must reset head tracker if job is discarded.
-				if loop {
-					head = ""
-					backoff = BaseBackoff
-				}
-			}
-			continue
-		}
-
-		// Reassign job to the updated data from Get. (The error branch
-		// above keeps the old job instead, since Get may return nil.)
-		job = j
-
-		if job.Status.State != JobStatusDone {
-			jobq.PushBack(job)
-			continue
-		}
-
-		if res := job.Status.ErrorResult; res != nil {
-			fmt.Fprintln(os.Stderr, res.Message)
-		} else {
-			stat := job.Statistics
-			lstat := stat.Load
-			stats.files += 1
-			stats.bytesIn += lstat.InputFileBytes
-			stats.bytesOut += lstat.OutputBytes
-			stats.rows += lstat.OutputRows
-			stats.elapsed +=
-				time.Duration(stat.EndTime-stat.StartTime) * time.Millisecond
-
-			if stats.start.IsZero() {
-				stats.start = time.Unix(stat.StartTime/1000, 0)
-			} else {
-				t := time.Unix(stat.StartTime/1000, 0)
-				if stats.start.Sub(t) > 0 {
-					stats.start = t
-				}
-			}
-
-			if stats.finish.IsZero() {
-				stats.finish = time.Unix(stat.EndTime/1000, 0)
-			} else {
-				t := time.Unix(stat.EndTime/1000, 0)
-				if t.Sub(stats.finish) > 0 {
-					stats.finish = t
-				}
-			}
-		}
-		// When the head job is processed reset the backoff since the loads
-		// run in BQ in parallel.
-		if loop {
-			head = ""
-			backoff = BaseBackoff
-		}
-	}
-
-	fmt.Fprintf(os.Stderr, "%#v\n", stats)
-}
-
-type jobStats struct {
-	// Number of files (sources) loaded.
-	files int64
-	// Bytes read from source (possibly compressed).
-	bytesIn int64
-	// Bytes loaded into BigQuery (uncompressed).
-	bytesOut int64
-	// Rows loaded into BigQuery.
-	rows int64
-	// Time taken to load source into table.
-	elapsed time.Duration
-	// Start time of the job.
-	start time.Time
-	// End time of the job.
-	finish time.Time
-}
-
-func (s jobStats) GoString() string {
-	return fmt.Sprintf("\n%d files loaded in %v (%v). Size: %.2fGB Rows: %d\n",
-		s.files, s.finish.Sub(s.start), s.elapsed, float64(s.bytesOut)/GB,
-		s.rows)
-}
diff --git a/examples/pubsub.go b/examples/pubsub.go
deleted file mode 100644
index 7788caf..0000000
--- a/examples/pubsub.go
+++ /dev/null
@@ -1,333 +0,0 @@
-// Copyright 2017 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
-	"bufio"
-	"encoding/base64"
-	"encoding/json"
-	"fmt"
-	"log"
-	"net"
-	"net/http"
-	"net/textproto"
-	"os"
-	"strings"
-
-	pubsub "google.golang.org/api/pubsub/v1beta2"
-)
-
-const USAGE = `Available arguments are:
-    <project_id> list_topics
-    <project_id> create_topic <topic>
-    <project_id> delete_topic <topic>
-    <project_id> list_subscriptions
-    <project_id> create_subscription <subscription> <linked topic>
-    <project_id> delete_subscription <subscription>
-    <project_id> connect_irc <topic> <server> <channel>
-    <project_id> pull_messages <subscription>
-`
-
-type IRCBot struct {
-	server   string
-	port     string
-	nick     string
-	user     string
-	channel  string
-	conn     net.Conn
-	tpReader *textproto.Reader
-}
-
-func NewIRCBot(server, channel, nick string) *IRCBot {
-	return &IRCBot{
-		server:  server,
-		port:    "6667",
-		nick:    nick,
-		channel: channel,
-		conn:    nil,
-		user:    nick,
-	}
-}
-
-func (bot *IRCBot) Connect() {
-	conn, err := net.Dial("tcp", bot.server+":"+bot.port)
-	if err != nil {
-		log.Fatal("unable to connect to IRC server ", err)
-	}
-	bot.conn = conn
-	log.Printf("Connected to IRC server %s (%s)\n",
-		bot.server, bot.conn.RemoteAddr())
-	bot.tpReader = textproto.NewReader(bufio.NewReader(bot.conn))
-	bot.Sendf("USER %s 8 * :%s\r\n", bot.nick, bot.nick)
-	bot.Sendf("NICK %s\r\n", bot.nick)
-	bot.Sendf("JOIN %s\r\n", bot.channel)
-}
-
-func (bot *IRCBot) CheckConnection() {
-	for {
-		line, err := bot.ReadLine()
-		if err != nil {
-			log.Fatal("Unable to read a line during checking the connection.")
-		}
-		if parts := strings.Split(line, " "); len(parts) > 1 {
-			if parts[1] == "004" {
-				log.Println("The nick accepted.")
-			} else if parts[1] == "433" {
-				log.Fatalf("The nick is already in use: %s", line)
-			} else if parts[1] == "366" {
-				log.Println("Starting to publish messages.")
-				return
-			}
-		}
-	}
-}
-
-func (bot *IRCBot) Sendf(format string, args ...interface{}) {
-	fmt.Fprintf(bot.conn, format, args...)
-}
-
-func (bot *IRCBot) Close() {
-	bot.conn.Close()
-}
-
-func (bot *IRCBot) ReadLine() (line string, err error) {
-	return bot.tpReader.ReadLine()
-}
-
-func init() {
-	registerDemo("pubsub", pubsub.PubsubScope, pubsubMain)
-}
-
-func pubsubUsage() {
-	fmt.Fprint(os.Stderr, USAGE)
-}
-
-// Returns a fully qualified resource name for Cloud Pub/Sub.
-func fqrn(res, proj, name string) string {
-	return fmt.Sprintf("projects/%s/%s/%s", proj, res, name)
-}
-
-func fullTopicName(proj, topic string) string {
-	return fqrn("topics", proj, topic)
-}
-
-func fullSubName(proj, topic string) string {
-	return fqrn("subscriptions", proj, topic)
-}
-
-// Check the length of the arguments.
-func checkArgs(argv []string, min int) {
-	if len(argv) < min {
-		pubsubUsage()
-		os.Exit(2)
-	}
-}
-
-func listTopics(service *pubsub.Service, argv []string) {
-	next := ""
-	for {
-		topicsList, err := service.Projects.Topics.List(fmt.Sprintf("projects/%s", argv[0])).PageToken(next).Do()
-		if err != nil {
-			log.Fatalf("listTopics query.Do() failed: %v", err)
-		}
-		for _, topic := range topicsList.Topics {
-			fmt.Println(topic.Name)
-		}
-		next = topicsList.NextPageToken
-		if next == "" {
-			break
-		}
-	}
-}
-
-func createTopic(service *pubsub.Service, argv []string) {
-	checkArgs(argv, 3)
-	topic, err := service.Projects.Topics.Create(fullTopicName(argv[0], argv[2]), &pubsub.Topic{}).Do()
-	if err != nil {
-		log.Fatalf("createTopic Create().Do() failed: %v", err)
-	}
-	fmt.Printf("Topic %s was created.\n", topic.Name)
-}
-
-func deleteTopic(service *pubsub.Service, argv []string) {
-	checkArgs(argv, 3)
-	topicName := fullTopicName(argv[0], argv[2])
-	if _, err := service.Projects.Topics.Delete(topicName).Do(); err != nil {
-		log.Fatalf("deleteTopic Delete().Do() failed: %v", err)
-	}
-	fmt.Printf("Topic %s was deleted.\n", topicName)
-}
-
-func listSubscriptions(service *pubsub.Service, argv []string) {
-	next := ""
-	for {
-		subscriptionsList, err := service.Projects.Subscriptions.List(fmt.Sprintf("projects/%s", argv[0])).PageToken(next).Do()
-		if err != nil {
-			log.Fatalf("listSubscriptions query.Do() failed: %v", err)
-		}
-		for _, subscription := range subscriptionsList.Subscriptions {
-			sub_text, _ := json.MarshalIndent(subscription, "", "  ")
-			fmt.Printf("%s\n", sub_text)
-		}
-		next = subscriptionsList.NextPageToken
-		if next == "" {
-			break
-		}
-	}
-}
-
-func createSubscription(service *pubsub.Service, argv []string) {
-	checkArgs(argv, 4)
-	name := fullSubName(argv[0], argv[2])
-	sub := &pubsub.Subscription{Topic: fullTopicName(argv[0], argv[3])}
-	subscription, err := service.Projects.Subscriptions.Create(name, sub).Do()
-	if err != nil {
-		log.Fatalf("createSubscription Create().Do() failed: %v", err)
-	}
-	fmt.Printf("Subscription %s was created.\n", subscription.Name)
-}
-
-func deleteSubscription(service *pubsub.Service, argv []string) {
-	checkArgs(argv, 3)
-	name := fullSubName(argv[0], argv[2])
-	if _, err := service.Projects.Subscriptions.Delete(name).Do(); err != nil {
-		log.Fatalf("deleteSubscription Delete().Do() failed: %v", err)
-	}
-	fmt.Printf("Subscription %s was deleted.\n", name)
-}
-
-func connectIRC(service *pubsub.Service, argv []string) {
-	checkArgs(argv, 5)
-	topicName := fullTopicName(argv[0], argv[2])
-	server := argv[3]
-	channel := argv[4]
-	nick := fmt.Sprintf("bot-%s", argv[2])
-	ircbot := NewIRCBot(server, channel, nick)
-	ircbot.Connect()
-	defer ircbot.Close()
-	ircbot.CheckConnection()
-	privMark := fmt.Sprintf("PRIVMSG %s :", ircbot.channel)
-	for {
-		line, err := ircbot.ReadLine()
-		if err != nil {
-			log.Fatal("Unable to read a line from the connection.")
-		}
-		parts := strings.Split(line, " ")
-		if len(parts) > 0 && parts[0] == "PING" {
-			ircbot.Sendf("PONG %s\r\n", parts[1])
-		} else {
-			pos := strings.Index(line, privMark)
-			if pos == -1 {
-				continue
-			}
-			privMsg := line[pos+len(privMark) : len(line)]
-			pubsubMessage := &pubsub.PubsubMessage{
-				Data: base64.StdEncoding.EncodeToString([]byte(privMsg)),
-			}
-			publishRequest := &pubsub.PublishRequest{
-				Messages: []*pubsub.PubsubMessage{pubsubMessage},
-			}
-			if _, err := service.Projects.Topics.Publish(topicName, publishRequest).Do(); err != nil {
-				log.Fatalf("connectIRC Publish().Do() failed: %v", err)
-			}
-			log.Println("Published a message to the topic.")
-		}
-	}
-}
-
-func pullMessages(service *pubsub.Service, argv []string) {
-	checkArgs(argv, 3)
-	subName := fullSubName(argv[0], argv[2])
-	pullRequest := &pubsub.PullRequest{
-		ReturnImmediately: false,
-		MaxMessages:       1,
-	}
-	for {
-		pullResponse, err := service.Projects.Subscriptions.Pull(subName, pullRequest).Do()
-		if err != nil {
-			log.Fatalf("pullMessages Pull().Do() failed: %v", err)
-		}
-		for _, receivedMessage := range pullResponse.ReceivedMessages {
-			data, err := base64.StdEncoding.DecodeString(receivedMessage.Message.Data)
-			if err != nil {
-				log.Fatalf("pullMessages DecodeString() failed: %v", err)
-			}
-			fmt.Printf("%s\n", data)
-			ackRequest := &pubsub.AcknowledgeRequest{
-				AckIds: []string{receivedMessage.AckId},
-			}
-			if _, err = service.Projects.Subscriptions.Acknowledge(subName, ackRequest).Do(); err != nil {
-				log.Printf("pullMessages Acknowledge().Do() failed: %v", err)
-			}
-		}
-	}
-}
-
-// This example demonstrates calling the Cloud Pub/Sub API. As of 20
-// Aug 2014, the Cloud Pub/Sub API is only available if you're
-// whitelisted. If you're interested in using it, please apply for the
-// Limited Preview program at the following form:
-// http://goo.gl/Wql9HL
-//
-// Also, before running this example, be sure to enable Cloud Pub/Sub
-// service on your project in Developer Console at:
-// https://console.developers.google.com/
-//
-// It has 8 subcommands as follows:
-//
-//  <project_id> list_topics
-//  <project_id> create_topic <topic>
-//  <project_id> delete_topic <topic>
-//  <project_id> list_subscriptions
-//  <project_id> create_subscription <subscription> <linked topic>
-//  <project_id> delete_subscription <subscription>
-//  <project_id> connect_irc <topic> <server> <channel>
-//  <project_id> pull_messages <subscription>
-//
-// You can use either your alphanumeric or numeric Cloud Project
-// ID for project_id. You can choose any names for topic and
-// subscription as long as they follow the naming rule described at:
-// https://developers.google.com/pubsub/overview#names
-//
-// The list/create/delete subcommands for topics and subscriptions are
-// self-explanatory. The "connect_irc" subcommand connects to an IRC
-// channel and publishes its messages to the specified Cloud Pub/Sub
-// topic, and the "pull_messages" subcommand continuously pulls
-// messages from the specified Cloud Pub/Sub subscription and displays
-// their data.
-func pubsubMain(client *http.Client, argv []string) {
-	checkArgs(argv, 2)
-	service, err := pubsub.New(client)
-	if err != nil {
-		log.Fatalf("Unable to create PubSub service: %v", err)
-	}
-
-	m := map[string]func(service *pubsub.Service, argv []string){
-		"list_topics":         listTopics,
-		"create_topic":        createTopic,
-		"delete_topic":        deleteTopic,
-		"list_subscriptions":  listSubscriptions,
-		"create_subscription": createSubscription,
-		"delete_subscription": deleteSubscription,
-		"connect_irc":         connectIRC,
-		"pull_messages":       pullMessages,
-	}
-	f, ok := m[argv[1]]
-	if !ok {
-		pubsubUsage()
-		os.Exit(2)
-	}
-	f(service, argv)
-}
diff --git a/examples/storage.go b/examples/storage.go
deleted file mode 100644
index 16f2cc3..0000000
--- a/examples/storage.go
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright 2017 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
-	"fmt"
-	"io"
-	"io/ioutil"
-	"log"
-	"net/http"
-	"os"
-
-	"google.golang.org/api/googleapi"
-	storage "google.golang.org/api/storage/v1"
-)
-
-func init() {
-	registerDemo("storage", storage.DevstorageReadWriteScope, storageMain)
-}
-
-func storageMain(client *http.Client, argv []string) {
-	if len(argv) != 2 {
-		fmt.Fprintln(os.Stderr, "Usage: storage filename bucket (to upload an object)")
-		return
-	}
-
-	service, err := storage.New(client)
-	if err != nil {
-		log.Fatalf("Unable to create Storage service: %v", err)
-	}
-
-	filename := argv[0]
-	bucket := argv[1]
-
-	goFile, err := os.Open(filename)
-	if err != nil {
-		log.Fatalf("error opening %q: %v", filename, err)
-	}
-	storageObject, err := service.Objects.Insert(bucket, &storage.Object{Name: filename}).Media(goFile).Do()
-	log.Printf("Got storage.Object, err: %#v, %v", storageObject, err)
-	if err != nil {
-		return
-	}
-
-	resp, err := service.Objects.Get(bucket, filename).Download()
-	if err != nil {
-		log.Fatalf("error downloading %q: %v", filename, err)
-	}
-	defer resp.Body.Close()
-
-	n, err := io.Copy(ioutil.Discard, resp.Body)
-	if err != nil {
-		log.Fatalf("error downloading %q: %v", filename, err)
-	}
-
-	log.Printf("Downloaded %d bytes", n)
-
-	// Test If-None-Match - should get a "HTTP 304 Not Modified" response.
-	obj, err := service.Objects.Get(bucket, filename).IfNoneMatch(storageObject.Etag).Do()
-	log.Printf("Got obj, err: %#v, %v", obj, err)
-	if googleapi.IsNotModified(err) {
-		log.Printf("Success. Object not modified since upload.")
-	} else {
-		log.Printf("Error: expected object to not be modified since upload.")
-	}
-}