Move some scripts that were in development/tools/scripts back into this project

They were moved into sdk/scripts when sdk was split from development.

Change-Id: I8404ae5fdeb9060adb76357f29b42c4c8e2054ee
Xavier Ducrohet
2009-12-01 13:03:49 -08:00
parent eb3547b3d0
commit b958224f51
16 changed files with 2722 additions and 0 deletions

scripts/add-accounts Executable file

@@ -0,0 +1,131 @@
#!/usr/bin/env python
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A faux Setup Wizard. Stuffs one or two usernames + passwords into the
database on the device.
"""
import sys
if sys.hexversion < 0x02040000:
print "This script requires python 2.4 or higher."
sys.exit(1)
import getpass
import subprocess
import time
import sha
DB = "/data/data/com.google.android.googleapps/databases/accounts.db"
def RunCmd(args):
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
out = proc.stdout.read()
if proc.wait():
print
print "failed: %s" % " ".join(args)
return None
return out
def GetProp(adb_flags, name):
args = ("adb",) + adb_flags + ("shell", "su", "root",
"/system/bin/getprop", name)
return RunCmd(args)
def SetProp(adb_flags, name, value):
args = ("adb",) + adb_flags + ("shell", "su", "root",
"/system/bin/setprop", name, value)
return RunCmd(args)
def DbExists(adb_flags):
args = ("adb",) + adb_flags + ("shell", "su", "root",
"/system/bin/ls", DB)
result = RunCmd(args)
if result is None: return None
return "No such file" not in result
def main(argv):
if len(argv) == 1:
print ("usage: %s [adb flags] "
"[<dasher address[:password]>] "
"[<gmail address[:password]>]") % (argv[0],)
sys.exit(2)
argv = argv[1:]
gmail = None
dasher = None
while argv and "@" in argv[-1]:
addr = argv.pop()
if "@gmail.com" in addr or "@googlemail.com" in addr:
gmail = addr
else:
dasher = addr
adb_flags = tuple(argv)
while True:
db = DbExists(adb_flags)
if db is None:
print "failed to contact device; will retry in 3 seconds"
time.sleep(3)
continue
if db:
print
print "GoogleLoginService has already started on this device;"
print "it's too late to use this script to add accounts."
print
print "This script only works on a freshly-wiped device (or "
print "emulator) while booting for the first time."
print
break
hosted_account = GetProp(adb_flags, "ro.config.hosted_account").strip()
google_account = GetProp(adb_flags, "ro.config.google_account").strip()
if dasher and hosted_account:
print
print "A dasher account is already configured on this device;"
print "can't add", hosted_account
print
dasher = None
if gmail and google_account:
print
print "A google account is already configured on this device;"
print "can't add", google_account
print
gmail = None
if not gmail and not dasher: break
if dasher:
SetProp(adb_flags, "ro.config.hosted_account", dasher)
print "set hosted_account to", dasher
if gmail:
SetProp(adb_flags, "ro.config.google_account", gmail)
print "set google_account to", gmail
break
if __name__ == "__main__":
main(sys.argv)
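For anyone adapting this script, here is a minimal sketch of the adb pattern it is built on. The adb_shell helper name is ours, not part of the script, and it assumes adb is on the PATH with an emulator as the target (-e):

import subprocess

def adb_shell(adb_flags, *cmd):
    # Run "adb <flags> shell <cmd...>" and return its stdout, or None on failure.
    args = ("adb",) + tuple(adb_flags) + ("shell",) + cmd
    proc = subprocess.Popen(args, stdout=subprocess.PIPE)
    out = proc.stdout.read()
    if proc.wait():
        return None
    return out

# Read and set the same properties the script manages, targeting the emulator.
# (On a real device, the script above additionally wraps commands in "su root".)
print(adb_shell(("-e",), "getprop", "ro.config.google_account"))
adb_shell(("-e",), "setprop", "ro.config.google_account", "someone@gmail.com")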

scripts/add-accounts-sdk Executable file

@@ -0,0 +1,128 @@
#!/usr/bin/env python
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A faux Setup Wizard. Stuffs one or two usernames + passwords into the
database on the device.
"""
import sys
if sys.hexversion < 0x02040000:
print "This script requires python 2.4 or higher."
sys.exit(1)
import getpass
import subprocess
import time
import sha
DB = "/data/data/com.google.android.googleapps/databases/accounts.db"
def RunCmd(args):
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
out = proc.stdout.read()
if proc.wait():
print
print "failed: %s" % " ".join(args)
return None
return out
def GetProp(adb_flags, name):
args = ("adb",) + adb_flags + ("shell", "/system/bin/getprop", name)
return RunCmd(args)
def SetProp(adb_flags, name, value):
args = ("adb",) + adb_flags + ("shell", "/system/bin/setprop", name, value)
return RunCmd(args)
def DbExists(adb_flags):
args = ("adb",) + adb_flags + ("shell", "/system/bin/ls", DB)
result = RunCmd(args)
if result is None: return None
return "No such file" not in result
def main(argv):
if len(argv) == 1:
print ("usage: %s [adb flags] "
"[<hosted address[:password]>] "
"[<gmail address[:password]>]") % (argv[0],)
sys.exit(2)
argv = argv[1:]
gmail = None
hosted = None
while argv and "@" in argv[-1]:
addr = argv.pop()
if "@gmail.com" in addr or "@googlemail.com" in addr:
gmail = addr
else:
hosted = addr
adb_flags = tuple(argv)
while True:
db = DbExists(adb_flags)
if db is None:
print "failed to contact device; will retry in 3 seconds"
time.sleep(3)
continue
if db:
print
print "GoogleLoginService has already started on this device;"
print "it's too late to use this script to add accounts."
print
print "This script only works on a freshly-wiped device (or "
print "emulator) while booting for the first time."
print
break
hosted_account = GetProp(adb_flags, "ro.config.hosted_account").strip()
google_account = GetProp(adb_flags, "ro.config.google_account").strip()
if hosted and hosted_account:
print
print "A hosted account is already configured on this device;"
print "can't add", hosted_account
print
hosted = None
if gmail and google_account:
print
print "A google account is already configured on this device;"
print "can't add", google_account
print
gmail = None
if not gmail and not hosted: break
if hosted:
SetProp(adb_flags, "ro.config.hosted_account", hosted)
print "set hosted_account to", hosted
if gmail:
SetProp(adb_flags, "ro.config.google_account", gmail)
print "set google_account to", gmail
break
if __name__ == "__main__":
main(sys.argv)


@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,16 @@
application: androidappdocs-staging
version: 1
runtime: python
api_version: 1
handlers:
- url: /gae_shell/static
static_dir: gae_shell/static
expiration: 1d
- url: /gae_shell/.*
script: /gae_shell/shell.py
login: admin
- url: .*
script: main.py


@@ -0,0 +1,17 @@
An interactive, stateful AJAX shell that runs Python code on the server.
Part of http://code.google.com/p/google-app-engine-samples/.
May be run as a standalone app or in an existing app as an admin-only handler.
Can be used for system administration tasks, as an interactive way to try out
APIs, or as a debugging aid during development.
The logging, os, sys, db, and users modules are imported automatically.
Interpreter state is stored in the datastore so that variables, function
definitions, and other values in the global and local namespaces can be used
across commands.
To use the shell in your app, copy shell.py, static/*, and templates/* into
your app's source directory. Then, copy the URL handlers from app.yaml into
your app.yaml.
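Because state persists, a session like the following works across separate requests. This transcript is hypothetical and exact output will vary:

>>> x = 42
>>> def double(n):
...   return n * 2
>>> double(x)
84

Since double is a function (unpicklable), shell.py below stores the defining statement itself and replays it on later requests, while x is pickled directly.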


@@ -0,0 +1,308 @@
#!/usr/bin/python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
An interactive, stateful AJAX shell that runs Python code on the server.
Part of http://code.google.com/p/google-app-engine-samples/.
May be run as a standalone app or in an existing app as an admin-only handler.
Can be used for system administration tasks, as an interactive way to try out
APIs, or as a debugging aid during development.
The logging, os, sys, db, and users modules are imported automatically.
Interpreter state is stored in the datastore so that variables, function
definitions, and other values in the global and local namespaces can be used
across commands.
To use the shell in your app, copy shell.py, static/*, and templates/* into
your app's source directory. Then, copy the URL handlers from app.yaml into
your app.yaml.
TODO: unit tests!
"""
import logging
import new
import os
import pickle
import sys
import traceback
import types
import wsgiref.handlers
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
# Set to True if stack traces should be shown in the browser, etc.
_DEBUG = True
# The entity kind for shell sessions. Feel free to rename to suit your app.
_SESSION_KIND = '_Shell_Session'
# Types that can't be pickled.
UNPICKLABLE_TYPES = (
types.ModuleType,
types.TypeType,
types.ClassType,
types.FunctionType,
)
# Unpicklable statements to seed new sessions with.
INITIAL_UNPICKLABLES = [
'import logging',
'import os',
'import sys',
'from google.appengine.ext import db',
'from google.appengine.api import users',
]
class Session(db.Model):
"""A shell session. Stores the session's globals.
Each session global is stored in one of two places:
If the global is picklable, it's stored in the parallel globals and
global_names list properties. (They're parallel lists to work around the
unfortunate fact that the datastore can't store dictionaries natively.)
If the global is not picklable (e.g. modules, classes, and functions), or if
it was created by the same statement that created an unpicklable global,
it's not stored directly. Instead, the statement is stored in the
unpicklables list property. On each request, before executing the current
statement, the unpicklable statements are evaluated to recreate the
unpicklable globals.
The unpicklable_names property stores all of the names of globals that were
added by unpicklable statements. When we pickle and store the globals after
executing a statement, we skip the ones in unpicklable_names.
Using Text instead of string is an optimization. We don't query on any of
these properties, so they don't need to be indexed.
"""
global_names = db.ListProperty(db.Text)
globals = db.ListProperty(db.Blob)
unpicklable_names = db.ListProperty(db.Text)
unpicklables = db.ListProperty(db.Text)
def set_global(self, name, value):
"""Adds a global, or updates it if it already exists.
Also removes the global from the list of unpicklable names.
Args:
name: the name of the global to set or update
value: any picklable value
"""
blob = db.Blob(pickle.dumps(value))
if name in self.global_names:
index = self.global_names.index(name)
self.globals[index] = blob
else:
self.global_names.append(db.Text(name))
self.globals.append(blob)
self.remove_unpicklable_name(name)
def remove_global(self, name):
"""Removes a global, if it exists.
Args:
name: string, the name of the global to remove
"""
if name in self.global_names:
index = self.global_names.index(name)
del self.global_names[index]
del self.globals[index]
def globals_dict(self):
"""Returns a dictionary view of the globals.
"""
return dict((name, pickle.loads(val))
for name, val in zip(self.global_names, self.globals))
def add_unpicklable(self, statement, names):
"""Adds a statement and list of names to the unpicklables.
Also removes the names from the globals.
Args:
statement: string, the statement that created new unpicklable global(s).
names: list of strings; the names of the globals created by the statement.
"""
self.unpicklables.append(db.Text(statement))
for name in names:
self.remove_global(name)
if name not in self.unpicklable_names:
self.unpicklable_names.append(db.Text(name))
def remove_unpicklable_name(self, name):
"""Removes a name from the list of unpicklable names, if it exists.
Args:
name: string, the name of the unpicklable global to remove
"""
if name in self.unpicklable_names:
self.unpicklable_names.remove(name)
class FrontPageHandler(webapp.RequestHandler):
"""Creates a new session and renders the shell.html template.
"""
def get(self):
# set up the session. TODO: garbage collect old shell sessions
session_key = self.request.get('session')
if session_key:
session = Session.get(session_key)
else:
# create a new session
session = Session()
session.unpicklables = [db.Text(line) for line in INITIAL_UNPICKLABLES]
session_key = session.put()
template_file = os.path.join(os.path.dirname(__file__), 'templates',
'shell.html')
session_url = '/?session=%s' % session_key
vars = { 'server_software': os.environ['SERVER_SOFTWARE'],
'python_version': sys.version,
'session': str(session_key),
'user': users.get_current_user(),
'login_url': users.create_login_url(session_url),
'logout_url': users.create_logout_url(session_url),
}
rendered = webapp.template.render(template_file, vars, debug=_DEBUG)
self.response.out.write(rendered)
class StatementHandler(webapp.RequestHandler):
"""Evaluates a python statement in a given session and returns the result.
"""
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
# extract the statement to be run
statement = self.request.get('statement')
if not statement:
return
# the python compiler doesn't like network line endings
statement = statement.replace('\r\n', '\n')
# add a couple newlines at the end of the statement. this makes
# single-line expressions such as 'class Foo: pass' evaluate happily.
statement += '\n\n'
# log and compile the statement up front
try:
logging.info('Compiling and evaluating:\n%s' % statement)
compiled = compile(statement, '<string>', 'single')
except:
self.response.out.write(traceback.format_exc())
return
# create a dedicated module to be used as this statement's __main__
statement_module = new.module('__main__')
# use this request's __builtin__, since it changes on each request.
# this is needed for import statements, among other things.
import __builtin__
statement_module.__builtins__ = __builtin__
# load the session from the datastore
session = Session.get(self.request.get('session'))
# swap in our custom module for __main__. then unpickle the session
# globals, run the statement, and re-pickle the session globals, all
# inside it.
old_main = sys.modules.get('__main__')
try:
sys.modules['__main__'] = statement_module
statement_module.__name__ = '__main__'
# re-evaluate the unpicklables
for code in session.unpicklables:
exec code in statement_module.__dict__
# re-initialize the globals
for name, val in session.globals_dict().items():
try:
statement_module.__dict__[name] = val
except:
msg = 'Dropping %s since it could not be unpickled.\n' % name
self.response.out.write(msg)
logging.warning(msg + traceback.format_exc())
session.remove_global(name)
# run!
old_globals = dict(statement_module.__dict__)
try:
old_stdout = sys.stdout
old_stderr = sys.stderr
try:
sys.stdout = self.response.out
sys.stderr = self.response.out
exec compiled in statement_module.__dict__
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
except:
self.response.out.write(traceback.format_exc())
return
# extract the new globals that this statement added
new_globals = {}
for name, val in statement_module.__dict__.items():
if name not in old_globals or val != old_globals[name]:
new_globals[name] = val
if True in [isinstance(val, UNPICKLABLE_TYPES)
for val in new_globals.values()]:
# this statement added an unpicklable global. store the statement and
# the names of all of the globals it added in the unpicklables.
session.add_unpicklable(statement, new_globals.keys())
logging.debug('Storing this statement as an unpicklable.')
else:
# this statement didn't add any unpicklables. pickle and store the
# new globals back into the datastore.
for name, val in new_globals.items():
if not name.startswith('__'):
session.set_global(name, val)
finally:
sys.modules['__main__'] = old_main
session.put()
def main():
application = webapp.WSGIApplication(
[('/gae_shell/', FrontPageHandler),
('/gae_shell/shell.do', StatementHandler)], debug=_DEBUG)
wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
main()
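The parallel-lists scheme described in the Session docstring can be read in isolation. Below is a small, datastore-free sketch of it; the SessionSketch name is ours, for illustration only:

import pickle

class SessionSketch(object):
    """Stands in for the datastore-backed Session above."""
    def __init__(self):
        self.global_names = []  # names, parallel to...
        self.globals = []       # ...pickled values

    def set_global(self, name, value):
        blob = pickle.dumps(value)
        if name in self.global_names:
            self.globals[self.global_names.index(name)] = blob
        else:
            self.global_names.append(name)
            self.globals.append(blob)

    def globals_dict(self):
        return dict((n, pickle.loads(b))
                    for n, b in zip(self.global_names, self.globals))

session = SessionSketch()
session.set_global('x', 42)
session.set_global('x', 43)    # updating in place keeps the lists parallel
print(session.globals_dict())  # {'x': 43}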


@@ -0,0 +1,195 @@
// Copyright 2007 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview
* Javascript code for the interactive AJAX shell.
*
* Part of http://code.google.com/p/google-app-engine-samples/.
*
* Includes a function (shell.runStatement) that sends the current python
* statement in the shell prompt text box to the server, and a callback
* (shell.done) that displays the results when the XmlHttpRequest returns.
*
* Also includes cross-browser code (shell.getXmlHttpRequest) to get an
* XmlHttpRequest.
*/
/**
* Shell namespace.
* @type {Object}
*/
var shell = {};
/**
* The shell history. history is an array of strings, ordered oldest to
* newest. historyCursor is the current history element that the user is on.
*
* The last history element is the statement that the user is currently
* typing. When a statement is run, it's frozen in the history, a new history
* element is added to the end of the array for the new statement, and
* historyCursor is updated to point to the new element.
*
* @type {Array}
*/
shell.history = [''];
/**
* See {shell.history}
* @type {number}
*/
shell.historyCursor = 0;
/**
* A constant for the XmlHttpRequest 'done' state.
* @type Number
*/
shell.DONE_STATE = 4;
/**
* A cross-browser function to get an XmlHttpRequest object.
*
* @return {XmlHttpRequest?} a new XmlHttpRequest
*/
shell.getXmlHttpRequest = function() {
if (window.XMLHttpRequest) {
return new XMLHttpRequest();
} else if (window.ActiveXObject) {
try {
return new ActiveXObject('Msxml2.XMLHTTP');
} catch(e) {
return new ActiveXObject('Microsoft.XMLHTTP');
}
}
return null;
};
/**
* This is the prompt textarea's onkeypress handler. Depending on the key that
* was pressed, it will run the statement, navigate the history, or update the
* current statement in the history.
*
* @param {Event} event the keypress event
* @return {Boolean} false to tell the browser not to submit the form.
*/
shell.onPromptKeyPress = function(event) {
var statement = document.getElementById('statement');
if (this.historyCursor == this.history.length - 1) {
// we're on the current statement. update it in the history before doing
// anything.
this.history[this.historyCursor] = statement.value;
}
// should we pull something from the history?
if (event.ctrlKey && event.keyCode == 38 /* up arrow */) {
if (this.historyCursor > 0) {
statement.value = this.history[--this.historyCursor];
}
return false;
} else if (event.ctrlKey && event.keyCode == 40 /* down arrow */) {
if (this.historyCursor < this.history.length - 1) {
statement.value = this.history[++this.historyCursor];
}
return false;
} else if (!event.altKey) {
// probably changing the statement. update it in the history.
this.historyCursor = this.history.length - 1;
this.history[this.historyCursor] = statement.value;
}
// should we submit?
var ctrlEnter = (document.getElementById('submit_key').value == 'ctrl-enter');
if (event.keyCode == 13 /* enter */ && !event.altKey && !event.shiftKey &&
event.ctrlKey == ctrlEnter) {
return this.runStatement();
}
};
/**
* The XmlHttpRequest callback. If the request succeeds, it adds the command
* and its resulting output to the shell history div.
*
* @param {XmlHttpRequest} req the XmlHttpRequest we used to send the current
* statement to the server
*/
shell.done = function(req) {
if (req.readyState == this.DONE_STATE) {
var statement = document.getElementById('statement')
statement.className = 'prompt';
// add the command to the shell output
var output = document.getElementById('output');
output.value += '\n>>> ' + statement.value;
statement.value = '';
// add a new history element
this.history.push('');
this.historyCursor = this.history.length - 1;
// add the command's result
var result = req.responseText.replace(/^\s*|\s*$/g, ''); // trim whitespace
if (result != '')
output.value += '\n' + result;
// scroll to the bottom
output.scrollTop = output.scrollHeight;
if (output.createTextRange) {
var range = output.createTextRange();
range.collapse(false);
range.select();
}
}
};
/**
* This is the form's onsubmit handler. It sends the python statement to the
* server, and registers shell.done() as the callback to run when it returns.
*
* @return {Boolean} false to tell the browser not to submit the form.
*/
shell.runStatement = function() {
var form = document.getElementById('form');
// build a XmlHttpRequest
var req = this.getXmlHttpRequest();
if (!req) {
document.getElementById('ajax-status').innerHTML =
"<span class='error'>Your browser doesn't support AJAX. :(</span>";
return false;
}
req.onreadystatechange = function() { shell.done(req); };
// build the query parameter string
var params = '';
for (var i = 0; i < form.elements.length; i++) {
var elem = form.elements[i];
if (elem.type != 'submit' && elem.type != 'button' && elem.id != 'caret') {
var value = escape(elem.value).replace(/\+/g, '%2B'); // escape ignores +
params += '&' + elem.name + '=' + value;
}
}
// send the request and tell the user.
document.getElementById('statement').className = 'prompt processing';
req.open(form.method, form.action + '?' + params, true);
req.setRequestHeader('Content-type',
'application/x-www-form-urlencoded;charset=UTF-8');
req.send(null);
return false;
};

Binary file not shown (1.5 KiB).


@@ -0,0 +1,122 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title> Interactive Shell </title>
<script type="text/javascript" src="/gae_shell/static/shell.js"></script>
<style type="text/css">
body {
font-family: monospace;
font-size: 10pt;
}
p {
margin: 0.5em;
}
.prompt, #output {
width: 45em;
border: 1px solid silver;
background-color: #f5f5f5;
font-size: 10pt;
margin: 0.5em;
padding: 0.5em;
padding-right: 0em;
overflow-x: hidden;
}
#toolbar {
margin-left: 0.5em;
padding-left: 0.5em;
}
#caret {
width: 2.5em;
margin-right: 0px;
padding-right: 0px;
border-right: 0px;
}
#statement {
width: 43em;
margin-left: -1em;
padding-left: 0px;
border-left: 0px;
background-position: top right;
background-repeat: no-repeat;
}
.processing {
background-image: url("/gae_shell/static/spinner.gif");
}
#ajax-status {
font-weight: bold;
}
.message {
color: #8AD;
font-weight: bold;
font-style: italic;
}
.error {
color: #F44;
}
.username {
font-weight: bold;
}
</style>
</head>
<body>
<p> Interactive server-side Python shell for
<a href="http://code.google.com/appengine/">Google App Engine</a>.
(<a href="http://code.google.com/p/google-app-engine-samples/">source</a>)
</p>
<textarea id="output" rows="22" readonly="readonly">
{{ server_software }}
Python {{ python_version }}
</textarea>
<form id="form" action="shell.do" method="get">
<nobr>
<textarea class="prompt" id="caret" readonly="readonly" rows="4"
onfocus="document.getElementById('statement').focus()"
>&gt;&gt;&gt;</textarea>
<textarea class="prompt" name="statement" id="statement" rows="4"
onkeypress="return shell.onPromptKeyPress(event);"></textarea>
</nobr>
<input type="hidden" name="session" value="{{ session }}" />
<input type="submit" style="display: none" />
</form>
<p id="ajax-status"></p>
<p id="toolbar">
{% if user %}
<span class="username">{{ user.nickname }}</span>
(<a href="{{ logout_url }}">log out</a>)
{% else %}
<a href="{{ login_url }}">log in</a>
{% endif %}
| Ctrl-Up/Down for history |
<select id="submit_key">
<option value="enter">Enter</option>
<option value="ctrl-enter" selected="selected">Ctrl-Enter</option>
</select>
<label for="submit_key">submits</label>
</p>
<script type="text/javascript">
document.getElementById('statement').focus();
</script>
</body>
</html>


@@ -0,0 +1,12 @@
indexes:
# AUTOGENERATED
# This index.yaml is automatically updated whenever the dev_appserver
# detects that a new type of query is run. If you want to manage the
# index.yaml file manually, remove the above marker line (the line
# saying "# AUTOGENERATED"). If you want to manage some indexes
# manually, move them above the marker line. The index.yaml file is
# automatically uploaded to the admin console when you next deploy
# your application using appcfg.py.


@@ -0,0 +1,573 @@
#!/usr/bin/env python
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A class to serve pages from zip files and use memcache for performance.
This contains a class and a function to create an anonymous instance of the
class to serve HTTP GET requests. Memcache is used to increase response speed
and lower processing cycles used in serving. Credit to Guido van Rossum and
his implementation of zipserve which served as a reference as I wrote this.
MemcachedZipHandler: Class that serves request
create_handler: method to create instance of MemcachedZipHandler
"""
__author__ = 'jmatt@google.com (Justin Mattson)'
import email.Utils
import logging
import mimetypes
import time
import zipfile
from google.appengine.api import memcache
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from time import localtime, strftime
def create_handler(zip_files, max_age=None, public=None):
"""Factory method to create a MemcachedZipHandler instance.
Args:
zip_files: A list of file names, or a list of lists whose first
member is a file name. See the MemcachedZipHandler documentation for
more information about the list-of-lists format
max_age: The maximum client-side cache lifetime
public: Whether this should be declared public in the client-side cache
Returns:
A MemcachedZipHandler wrapped in a pretty, anonymous bow for use with App
Engine
Raises:
ValueError: if the zip_files argument is not a list
"""
# verify argument integrity. If the argument is passed in list format,
# convert it to list of lists format
if zip_files and type(zip_files).__name__ == 'list':
num_items = len(zip_files)
while num_items > 0:
if type(zip_files[num_items - 1]).__name__ != 'list':
zip_files[num_items - 1] = [zip_files[num_items-1]]
num_items -= 1
else:
raise ValueError('File name arguments must be a list')
class HandlerWrapper(MemcachedZipHandler):
"""Simple wrapper for an instance of MemcachedZipHandler.
I'm still not sure why this is needed
"""
def get(self, name):
self.zipfilenames = zip_files
self.TrueGet(name)
if max_age is not None:
MAX_AGE = max_age
if public is not None:
PUBLIC = public
return HandlerWrapper
class MemcachedZipHandler(webapp.RequestHandler):
"""Handles get requests for a given URL.
Serves a GET request from a series of zip files. As files are served they are
put into memcache, which is much faster than retrieving them from the zip
source file again. It also uses considerably fewer CPU cycles.
"""
zipfile_cache = {} # class cache of source zip files
MAX_AGE = 600 # max client-side cache lifetime
PUBLIC = True # public cache setting
CACHE_PREFIX = 'cache://' # memcache key prefix for actual URLs
NEG_CACHE_PREFIX = 'noncache://' # memcache key prefix for non-existent URLs
intlString = 'intl/'
validLangs = ['en', 'de', 'es', 'fr','it','ja','zh-CN','zh-TW']
def TrueGet(self, reqUri):
"""The top-level entry point to serving requests.
Called 'True' get because it does the work when called from the wrapper
class' get method. Some logic is applied to the request to serve files
from an intl/<lang>/... directory or fall through to the default language.
Args:
reqUri: The URL requested
Returns:
None
"""
langName = 'en'
resetLangCookie = False
urlLangName = None
retry = False
isValidIntl = False
# Try to retrieve the user's lang pref from the cookie. If there is no
# lang pref cookie in the request, add set-cookie to the response with the
# default value of 'en'.
try:
langName = self.request.cookies['android_developer_pref_lang']
except KeyError:
resetLangCookie = True
#logging.info('==========================EXCEPTION: NO LANG COOKIE FOUND, USING [%s]', langName)
logging.info('==========================REQ INIT name [%s] langName [%s]', reqUri, langName)
# Preprocess the req url. If it references a directory or the domain itself,
# append '/index.html' to the url and 302 redirect. Otherwise, continue
# processing the request below.
name = self.PreprocessUrl(reqUri, langName)
if name:
# Do some prep for handling intl requests. Parse the url and validate
# the intl/lang substring, extract the url lang code (urlLangName) and
# the uri that follows the intl/lang substring (contentUri)
sections = name.split("/", 2)
contentUri = 0
isIntl = len(sections) > 1 and (sections[0] == "intl")
if isIntl:
isValidIntl = sections[1] in self.validLangs
if isValidIntl:
urlLangName = sections[1]
contentUri = sections[2]
if (langName != urlLangName):
# if the lang code in the request is different from that in
# the cookie, reset the cookie to the url lang value.
langName = urlLangName
resetLangCookie = True
#logging.info('INTL PREP resetting langName to urlLangName [%s]', langName)
#else:
# logging.info('INTL PREP no need to reset langName')
# Send for processing
if self.isCleanUrl(name, langName, isValidIntl):
# handle a 'clean' request.
# Try to form a response using the actual request url.
if not self.CreateResponse(name, langName, isValidIntl, resetLangCookie):
# If CreateResponse returns False, there was no such document
# in the intl/lang tree. Before going to 404, see if there is an
# English-language version of the doc in the default tree
# and return it, else go to 404.
self.CreateResponse(contentUri, langName, False, resetLangCookie)
elif isIntl:
# handle the case where we need to pass through an invalid intl req
# for processing (so as to get 404 as appropriate). This is needed
# because intl urls are passed through clean and retried in English,
# if necessary.
logging.info(' Handling an invalid intl request...')
self.CreateResponse(name, langName, isValidIntl, resetLangCookie)
else:
# handle the case where we have a non-clean url (usually a non-intl
# url) that we need to interpret in the context of any lang pref
# that is set. Prepend an intl/lang string to the request url and
# send it as a 302 redirect. After the redirect, the subsequent
# request will be handled as a clean url.
self.RedirToIntl(name, self.intlString, langName)
def isCleanUrl(self, name, langName, isValidIntl):
"""Determine whether to pass an incoming url straight to processing.
Args:
name: The incoming URL
langName: The user's language preference (from cookie or url)
isValidIntl: Whether the URL is a valid intl/<lang> request
Returns:
boolean: Whether the URL should be sent straight to processing
"""
if (langName == 'en') or isValidIntl or not ('.html' in name) or (not isValidIntl and not langName):
return True
def PreprocessUrl(self, name, langName):
"""Any preprocessing work on the URL when it comes in.
Put any work related to interpreting the incoming URL here. For example,
this is used to redirect requests for a directory to the index.html file
in that directory. Subclasses should override this method to do different
preprocessing.
Args:
name: The incoming URL
Returns:
False if the request was redirected to '/index.html', or
The processed URL, otherwise
"""
# determine if this is a request for a directory
final_path_segment = name
final_slash_offset = name.rfind('/')
if final_slash_offset != len(name) - 1:
final_path_segment = name[final_slash_offset + 1:]
if final_path_segment.find('.') == -1:
name = ''.join([name, '/'])
# if this is a directory or the domain itself, redirect to /index.html
if not name or (name[len(name) - 1:] == '/'):
uri = ''.join(['/', name, 'index.html'])
logging.info('--->PREPROCESSING REDIRECT [%s] to [%s] with langName [%s]', name, uri, langName)
self.redirect(uri, False)
return False
else:
return name
def RedirToIntl(self, name, intlString, langName):
"""Redirect an incoming request to the appropriate intl uri.
Builds the intl/lang string from a base (en) string
and redirects (302) the request to look for a version
of the file in the language that matches the client-
supplied cookie value.
Args:
name: The incoming, preprocessed URL
intlString: The literal 'intl/' prefix to prepend
langName: The language id to insert into the URL
Returns:
The lang-specific URL
"""
builtIntlLangUri = ''.join([intlString, langName, '/', name, '?', self.request.query_string])
uri = ''.join(['/', builtIntlLangUri])
logging.info('-->>REDIRECTING %s to %s', name, uri)
self.redirect(uri, False)
return uri
def CreateResponse(self, name, langName, isValidIntl, resetLangCookie):
"""Process the url and form a response, if appropriate.
Attempts to retrieve the requested file (name) from cache,
negative cache, or store (zip) and form the response.
For intl requests that are not found (in the localized tree),
returns False rather than forming a response, so that
the request can be retried with the base url (this is the
fallthrough to default language).
For requests that are found, forms the headers and
adds the content to the response entity. If the request was
for an intl (localized) url, also resets the language cookie
to the language specified in the url if needed, to ensure that
the client language and response data remain harmonious.
Args:
name: The incoming, preprocessed URL
langName: The language id. Used as necessary to reset the
language cookie in the response.
isValidIntl: If present, indicates whether the request is
for a language-specific url
resetLangCookie: Whether the response should reset the
language cookie to 'langName'
Returns:
True: A response was successfully created for the request
False: No response was created.
"""
# see if we have the page in the memcache
logging.info('PROCESSING %s langName [%s] isValidIntl [%s] resetLang [%s]',
name, langName, isValidIntl, resetLangCookie)
resp_data = self.GetFromCache(name)
if resp_data is None:
logging.info(' Cache miss for %s', name)
resp_data = self.GetFromNegativeCache(name)
if resp_data is None:
resp_data = self.GetFromStore(name)
# IF we have the file, put it in the memcache
# ELSE put it in the negative cache
if resp_data is not None:
self.StoreOrUpdateInCache(name, resp_data)
elif isValidIntl:
# couldn't find the intl doc. Try to fall through to English.
#logging.info(' Retrying with base uri...')
return False
else:
logging.info(' Adding %s to negative cache, serving 404', name)
self.StoreInNegativeCache(name)
self.Write404Error()
return True
else:
# found it in negative cache
self.Write404Error()
return True
# found content from cache or store
logging.info('FOUND CLEAN')
if resetLangCookie:
logging.info(' Resetting android_developer_pref_lang cookie to [%s]',
langName)
expireDate = time.mktime(localtime()) + 60 * 60 * 24 * 365 * 10
self.response.headers.add_header('Set-Cookie',
'android_developer_pref_lang=%s; path=/; expires=%s' %
(langName, strftime("%a, %d %b %Y %H:%M:%S", localtime(expireDate))))
mustRevalidate = False
if ('.html' in name):
# revalidate html files -- workaround for cache inconsistencies for
# negotiated responses
mustRevalidate = True
logging.info(' Adding [Vary: Cookie] to response...')
self.response.headers.add_header('Vary', 'Cookie')
content_type, encoding = mimetypes.guess_type(name)
if content_type:
self.response.headers['Content-Type'] = content_type
self.SetCachingHeaders(mustRevalidate)
self.response.out.write(resp_data)
elif (name == 'favicon.ico'):
self.response.headers['Content-Type'] = 'image/x-icon'
self.SetCachingHeaders(mustRevalidate)
self.response.out.write(resp_data)
elif name.endswith('.psd'):
self.response.headers['Content-Type'] = 'application/octet-stream'
self.SetCachingHeaders(mustRevalidate)
self.response.out.write(resp_data)
return True
def GetFromStore(self, file_path):
"""Retrieve file from zip files.
Get the file from the source, it must not have been in the memcache. If
possible, we'll use the zip file index to quickly locate where the file
should be found. (See MapToFileArchive documentation for assumptions about
file ordering.) If we don't have an index or don't find the file where the
index says we should, look through all the zip files to find it.
Args:
file_path: the file that we're looking for
Returns:
The contents of the requested file
"""
resp_data = None
file_itr = iter(self.zipfilenames)
# check the index, if we have one, to see what archive the file is in
archive_name = self.MapFileToArchive(file_path)
if not archive_name:
archive_name = file_itr.next()[0]
while resp_data is None and archive_name:
zip_archive = self.LoadZipFile(archive_name)
if zip_archive:
# we expect some lookups will fail, and that's okay, 404s will deal
# with that
try:
resp_data = zip_archive.read(file_path)
except (KeyError, RuntimeError), err:
# lookup misses are expected here; the 404 path below handles them
pass
if resp_data is not None:
logging.info('%s read from %s', file_path, archive_name)
try:
archive_name = file_itr.next()[0]
except (StopIteration), err:
archive_name = False
return resp_data
def LoadZipFile(self, zipfilename):
"""Convenience method to load zip file.
Just a convenience method to load the zip file from the data store. This is
useful if we ever want to change data stores and also as a means of
dependency injection for testing. This method will look at our file cache
first, and then load and cache the file if there's a cache miss
Args:
zipfilename: the name of the zip file to load
Returns:
The zip file requested, or None if there is an I/O error
"""
zip_archive = None
zip_archive = self.zipfile_cache.get(zipfilename)
if zip_archive is None:
try:
zip_archive = zipfile.ZipFile(zipfilename)
self.zipfile_cache[zipfilename] = zip_archive
except (IOError, RuntimeError), err:
logging.error('Can\'t open zipfile %s, cause: %s' % (zipfilename,
err))
return zip_archive
def MapFileToArchive(self, file_path):
"""Given a file name, determine what archive it should be in.
This method makes two critical assumptions.
(1) The zip files passed as an argument to the handler, if concatenated
in that same order, would result in a total ordering
of all the files. See (2) for ordering type.
(2) Upper case letters before lower case letters. The traversal of a
directory tree is depth first. A parent directory's files are added
before the files of any child directories
Args:
file_path: the file to be mapped to an archive
Returns:
The name of the archive where we expect the file to be
"""
num_archives = len(self.zipfilenames)
while num_archives > 0:
target = self.zipfilenames[num_archives - 1]
if len(target) > 1:
if self.CompareFilenames(target[1], file_path) >= 0:
return target[0]
num_archives -= 1
return None
def CompareFilenames(self, file1, file2):
"""Determines whether file1 is lexigraphically 'before' file2.
WARNING: This method assumes that paths are output in a depth-first,
with parent directories' files stored before childs'
We say that file1 is lexigraphically before file2 if the last non-matching
path segment of file1 is alphabetically before file2.
Args:
file1: the first file path
file2: the second file path
Returns:
A positive number if file1 is before file2
A negative number if file2 is before file1
0 if filenames are the same
"""
f1_segments = file1.split('/')
f2_segments = file2.split('/')
segment_ptr = 0
while (segment_ptr < len(f1_segments) and
segment_ptr < len(f2_segments) and
f1_segments[segment_ptr] == f2_segments[segment_ptr]):
segment_ptr += 1
if len(f1_segments) == len(f2_segments):
# we fell off the end, the paths must be the same
if segment_ptr == len(f1_segments):
return 0
# we didn't fall off the end, compare the segments where they differ
if f1_segments[segment_ptr] < f2_segments[segment_ptr]:
return 1
elif f1_segments[segment_ptr] > f2_segments[segment_ptr]:
return -1
else:
return 0
# the number of segments differs, we either mismatched comparing
# directories, or comparing a file to a directory
else:
# IF we were looking at the last segment of one of the paths,
# the one with fewer segments is first because files come before
# directories
# ELSE we just need to compare directory names
if (segment_ptr + 1 == len(f1_segments) or
segment_ptr + 1 == len(f2_segments)):
return len(f2_segments) - len(f1_segments)
else:
if f1_segments[segment_ptr] < f2_segments[segment_ptr]:
return 1
elif f1_segments[segment_ptr] > f2_segments[segment_ptr]:
return -1
else:
return 0
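  # Worked examples of the ordering above (illustrative; not in the original):
  #   CompareFilenames('a/b.html', 'a/c.html')   ->  1  ('b.html' sorts first)
  #   CompareFilenames('a/b/x.html', 'a/y.html') -> -1  (files sort before
  #                                                      child directories)
  #   CompareFilenames('a/b.html', 'a/b.html')   ->  0  (identical paths)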
def SetCachingHeaders(self, revalidate):
"""Set caching headers for the request."""
max_age = self.MAX_AGE
#self.response.headers['Expires'] = email.Utils.formatdate(
# time.time() + max_age, usegmt=True)
cache_control = []
if self.PUBLIC:
cache_control.append('public')
cache_control.append('max-age=%d' % max_age)
if revalidate:
cache_control.append('must-revalidate')
self.response.headers['Cache-Control'] = ', '.join(cache_control)
def GetFromCache(self, filename):
"""Get file from memcache, if available.
Args:
filename: The URL of the file to return
Returns:
The content of the file
"""
return memcache.get('%s%s' % (self.CACHE_PREFIX, filename))
def StoreOrUpdateInCache(self, filename, data):
"""Store data in the cache.
Store a piece of data in the memcache. Memcache has a maximum item size of
1*10^6 bytes. If the data is too large, fail, but log the failure. Future
work will consider compressing the data before storing or chunking it
Args:
filename: the name of the file to store
data: the data of the file
Returns:
None
"""
try:
if not memcache.add('%s%s' % (self.CACHE_PREFIX, filename), data):
memcache.replace('%s%s' % (self.CACHE_PREFIX, filename), data)
except (ValueError), err:
logging.warning('Data size too large to cache\n%s' % err)
def Write404Error(self):
"""Ouptut a simple 404 response."""
self.error(404)
self.response.out.write(
''.join(['<html><head><title>404: Not Found</title></head>',
'<body><b><h2>Error 404</h2><br/>',
'File not found</b></body></html>']))
def StoreInNegativeCache(self, filename):
"""If a non-existant URL is accessed, cache this result as well.
Future work should consider setting a maximum negative cache size to
prevent it from negatively impacting the real cache.
Args:
filename: URL to add to the negative cache
Returns:
None
"""
memcache.add('%s%s' % (self.NEG_CACHE_PREFIX, filename), -1)
def GetFromNegativeCache(self, filename):
"""Retrieve from negative cache.
Args:
filename: URL to retrieve
Returns:
The file contents if present in the negative cache.
"""
return memcache.get('%s%s' % (self.NEG_CACHE_PREFIX, filename))
def main():
application = webapp.WSGIApplication([('/([^/]+)/(.*)',
MemcachedZipHandler)])
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
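A sketch of wiring this handler into an App Engine app of your own, assuming the module is saved as memcache_zipserve.py and that docs1.zip and docs2.zip exist. Per MapFileToArchive, the optional second member of each inner list is the last file stored in that archive, used as an index boundary:

from google.appengine.ext import webapp
from google.appengine.ext.webapp import util

import memcache_zipserve

application = webapp.WSGIApplication([
    ('/(.*)', memcache_zipserve.create_handler([
        ['docs1.zip', 'guide/index.html'],  # archive, plus its last file
        ['docs2.zip'],
    ]))],
    debug=False)

def main():
    util.run_wsgi_app(application)

if __name__ == '__main__':
    main()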

scripts/combine_sdks.sh Executable file

@@ -0,0 +1,105 @@
#!/bin/bash
function replace()
{
echo replacing $1
rm $V -rf "$UNZIPPED_BASE_DIR"/$1
cp $V -rf "$UNZIPPED_IMAGE_DIR"/$1 "$UNZIPPED_BASE_DIR"/$1
}
V=""
Q="-q"
if [ "$1" == "-v" ]; then
V="-v"
Q=""
shift
fi
NOZIP=""
if [ "$1" == "-nozip" ]; then
NOZIP="1"
shift
fi
BASE="$1"
IMAGES="$2"
OUTPUT="$3"
if [[ -z "$BASE" || -z "$IMAGES" || -z "$OUTPUT" ]] ; then
echo "usage: combine_sdks.sh [-v] [-nozip] BASE IMAGES OUTPUT"
echo
echo " BASE and IMAGES should be sdk zip files. The system image files,"
echo " emulator and other runtime files will be copied from IMAGES and"
echo " everything else will be copied from BASE. All of this will be"
echo " bundled into OUTPUT and zipped up again (unless -nozip is specified)."
echo
exit 1
fi
TMP=$(mktemp -d)
TMP_ZIP=tmp.zip
# determine executable extension
case `uname -s` in
*_NT-*) # for Windows
EXE=.exe
;;
*)
EXE=
;;
esac
BASE_DIR="$TMP"/base
IMAGES_DIR="$TMP"/images
OUTPUT_TMP_ZIP="$BASE_DIR/$TMP_ZIP"
unzip $Q "$BASE" -d "$BASE_DIR"
unzip $Q "$IMAGES" -d "$IMAGES_DIR"
UNZIPPED_BASE_DIR=$(echo "$BASE_DIR"/*)
UNZIPPED_IMAGE_DIR=$(echo "$IMAGES_DIR"/*)
#
# The commands to copy over the files that we want
#
# replace tools/emulator # at this time we do not want the exe from SDK1.x
replace tools/lib/images
replace tools/lib/res
replace tools/lib/fonts
replace tools/lib/layoutlib.jar
replace docs
replace android.jar
for i in widgets categories broadcast_actions service_actions; do
replace tools/lib/$i.txt
done
if [ -d "$UNZIPPED_BASE_DIR"/usb_driver ]; then
replace usb_driver
fi
#
# end
#
if [ -z "$NOZIP" ]; then
pushd "$BASE_DIR" &> /dev/null
# rename the directory to the leaf minus the .zip of OUTPUT
LEAF=$(echo "$OUTPUT" | sed -e "s:.*\.zip/::" | sed -e "s:.zip$::")
mv * "$LEAF"
# zip it
zip $V -qr "$TMP_ZIP" "$LEAF"
popd &> /dev/null
cp $V "$OUTPUT_TMP_ZIP" "$OUTPUT"
echo "Combined SDK available at $OUTPUT"
else
OUT_DIR="${OUTPUT//.zip/}"
mv $V "$BASE_DIR"/* "$OUT_DIR"
echo "Unzipped combined SDK available at $OUT_DIR"
fi
rm $V -rf "$TMP"

scripts/divide_and_compress.py Executable file

@@ -0,0 +1,366 @@
#!/usr/bin/python2.4
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Module to compress directories in to series of zip files.
This module will take a directory and compress all its contents, including
child directories into a series of zip files named N.zip where 'N' ranges from
0 to infinity. The zip files will all be below a certain specified maximum
threshold.
The directory is compressed with a depth-first traversal, each directory's
file contents being compressed as it is visited, before the compression of any
child directory's contents. In this way the files within an archive are ordered
and the archives themselves are ordered.
The class also constructs a 'main.py' file intended for use with Google App
Engine with a custom App Engine program not currently distributed with this
code base. The custom App Engine runtime can leverage the index files written
out by this class to more quickly locate which zip file to serve a given URL
from.
"""
__author__ = 'jmatt@google.com (Justin Mattson)'
import optparse
import os
import stat
import sys
import zipfile
import divide_and_compress_constants
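# Illustrative usage (paths are hypothetical): running
#   divide_and_compress.py -s docs/ -d out/ -f 1M
# leaves out/ holding 0.zip, 1.zip, ... plus the generated main.py index.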
def CreateOptionsParser():
"""Creates the parser for command line arguments.
Returns:
A configured optparse.OptionParser object.
"""
rtn = optparse.OptionParser()
rtn.add_option('-s', '--sourcefiles', dest='sourcefiles', default=None,
help='The directory containing the files to compress')
rtn.add_option('-d', '--destination', dest='destination', default=None,
help=('Where to put the archive files, this should not be'
' a child of where the source files exist.'))
rtn.add_option('-f', '--filesize', dest='filesize', default='1M',
help=('Maximum size of archive files. A number followed by '
'a magnitude indicator either "B", "K", "M", or "G". '
'Examples:\n 1000000B == one million BYTES\n'
' 1.2M == one point two MEGABYTES\n'
' 1M == 1048576 BYTES'))
rtn.add_option('-n', '--nocompress', action='store_false', dest='compress',
default=True,
help=('Whether the archive files should be compressed, or '
'just a concatenation of the source files'))
return rtn
def VerifyArguments(options, parser):
"""Runs simple checks on correctness of commandline arguments.
Args:
options: The command line options passed.
parser: The parser object used to parse the command string.
"""
try:
if options.sourcefiles is None or options.destination is None:
parser.print_help()
sys.exit(-1)
except AttributeError:
parser.print_help()
sys.exit(-1)
def ParseSize(size_str):
"""Parse the file size argument from a string to a number of bytes.
Args:
size_str: The string representation of the file size.
Returns:
The file size in bytes.
Raises:
ValueError: If the numeric or magnitude portion of the file size
argument is invalid.
"""
if len(size_str) < 2:
raise ValueError(('filesize argument not understood, please include'
' a numeric value and magnitude indicator'))
magnitude = size_str[-1]
if magnitude not in ('B', 'K', 'M', 'G'):
raise ValueError(('filesize magnitude indicator not valid, must be "B", '
'"K", "M", or "G"'))
numeral = float(size_str[:-1])
if magnitude == 'K':
numeral *= 1024
elif magnitude == 'M':
numeral *= 1048576
elif magnitude == 'G':
numeral *= 1073741824
return int(numeral)
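# Illustrative values (not in the original source):
#   ParseSize('1000000B') == 1000000
#   ParseSize('1.2M') == 1258291   # int(1.2 * 1048576)
#   ParseSize('2G') == 2147483648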
class DirectoryZipper(object):
"""Class to compress a directory and all its sub-directories."""
def __init__(self, output_path, base_dir, archive_size, enable_compression):
"""DirectoryZipper constructor.
Args:
output_path: A string, the path to write the archives and index file to.
base_dir: A string, the directory to compress.
archive_size: A number, the maximum size, in bytes, of a single
archive file.
enable_compression: A boolean, whether or not compression should be
enabled; if disabled, the files will be written into an uncompressed
zip.
"""
self.output_dir = output_path
self.current_archive = '0.zip'
self.base_path = base_dir
self.max_size = archive_size
self.compress = enable_compression
# Set index_fp to None, because we don't know what it will be yet.
self.index_fp = None
def StartCompress(self):
"""Start compress of the directory.
This will start the compression process and write the archives to the
specified output directory. It will also produce an 'index.txt' file in the
output directory that maps from file to archive.
"""
self.index_fp = open(os.path.join(self.output_dir, 'main.py'), 'w')
self.index_fp.write(divide_and_compress_constants.file_preamble)
os.path.walk(self.base_path, self.CompressDirectory, 1)
self.index_fp.write(divide_and_compress_constants.file_endpiece)
self.index_fp.close()
def RemoveLastFile(self, archive_path=None):
"""Removes the last item in the archive.
This removes the last item in the archive by reading the items out of the
archive, adding them to a new archive, deleting the old archive, and
moving the new archive to the location of the old archive.
Args:
archive_path: Path to the archive to modify. This archive should not be
open elsewhere, since it will need to be deleted.
Returns:
None. The archive at the given path is rewritten without its last member.
"""
if archive_path is None:
archive_path = os.path.join(self.output_dir, self.current_archive)
# Move the old file and create a new one at its old location.
root, ext = os.path.splitext(archive_path)
old_archive = ''.join([root, '-old', ext])
os.rename(archive_path, old_archive)
old_fp = self.OpenZipFileAtPath(old_archive, mode='r')
# By default, store uncompressed.
compress_bit = zipfile.ZIP_STORED
if self.compress:
compress_bit = zipfile.ZIP_DEFLATED
new_fp = self.OpenZipFileAtPath(archive_path,
mode='w',
compress=compress_bit)
# Copy the members of the old archive into the new one, except the last one.
for zip_member in old_fp.infolist()[:-1]:
new_fp.writestr(zip_member, old_fp.read(zip_member.filename))
# Close files and delete the old one.
old_fp.close()
new_fp.close()
os.unlink(old_archive)
def OpenZipFileAtPath(self, path, mode=None, compress=zipfile.ZIP_DEFLATED):
"""This method is mainly for testing purposes, eg dependency injection."""
if mode is None:
if os.path.exists(path):
mode = 'a'
else:
mode = 'w'
if mode == 'r':
return zipfile.ZipFile(path, mode)
else:
return zipfile.ZipFile(path, mode, compress)
def CompressDirectory(self, unused_id, dir_path, dir_contents):
"""Method to compress the given directory.
This method compresses the directory 'dir_path'. It will add to an existing
zip file that still has space and create new ones as necessary to keep zip
file sizes under the maximum specified size. This also writes out the
mapping of files to archives to the self.index_fp file object.
Args:
unused_id: A numeric identifier passed by the os.path.walk method; it is
not used by this method.
dir_path: A string, the path to the directory to compress.
dir_contents: A list of directory contents to be compressed.
"""
# Construct the queue of files to be added. dir_contents appears to arrive
# from os.path.walk in reverse alphabetical order, so sort it into
# alphabetical order first to keep both files and archives ordered.
dir_contents.sort()
zip_queue = []
for filename in dir_contents:
zip_queue.append(os.path.join(dir_path, filename))
compress_bit = zipfile.ZIP_DEFLATED
if not self.compress:
compress_bit = zipfile.ZIP_STORED
# Zip all files in this directory, adding to existing archives and creating
# as necessary.
while zip_queue:
target_file = zip_queue[0]
if os.path.isfile(target_file):
self.AddFileToArchive(target_file, compress_bit)
# See if adding the new file made our archive too large.
if not self.ArchiveIsValid():
# IF fixing fails, the last added file was too large; skip it.
# ELSE the current archive filled normally, make a new one and try
# adding the file again.
if not self.FixArchive('SIZE'):
zip_queue.pop(0)
else:
self.current_archive = '%i.zip' % (
int(self.current_archive[
0:self.current_archive.rfind('.zip')]) + 1)
else:
# Write an index record if necessary.
self.WriteIndexRecord()
zip_queue.pop(0)
else:
zip_queue.pop(0)
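# Note: archive names advance by parsing the numeric prefix of the current
# name, e.g. '3.zip' -> int('3') + 1 -> '4.zip' (the rfind('.zip')
# arithmetic above).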
def WriteIndexRecord(self):
"""Write an index record to the index file.
Only write an index record if this is the first file to go into the archive.
Returns:
True if an archive record is written, False if it isn't.
"""
archive = self.OpenZipFileAtPath(
os.path.join(self.output_dir, self.current_archive), 'r')
archive_index = archive.infolist()
if len(archive_index) == 1:
self.index_fp.write(
'[\'%s\', \'%s\'],\n' % (self.current_archive,
archive_index[0].filename))
archive.close()
return True
else:
archive.close()
return False
def FixArchive(self, problem):
"""Make the archive compliant.
Args:
problem: An enum, the reason the archive is invalid.
Returns:
Whether the file removed to fix the archive could conceivably fit in some
archive but simply could not be added to this one (True), or can never
be stored at all (False).
"""
archive_path = os.path.join(self.output_dir, self.current_archive)
return_value = None
if problem == 'SIZE':
archive_obj = self.OpenZipFileAtPath(archive_path, mode='r')
num_archive_files = len(archive_obj.infolist())
# IF there is a single file, that means it is too large to compress;
# delete the created archive
# ELSE do normal finalization.
if num_archive_files == 1:
print ('WARNING: %s%s is too large to store.' % (
self.base_path, archive_obj.infolist()[0].filename))
archive_obj.close()
os.unlink(archive_path)
return_value = False
else:
archive_obj.close()
self.RemoveLastFile(
os.path.join(self.output_dir, self.current_archive))
print 'Final archive size for %s is %i' % (
self.current_archive, os.path.getsize(archive_path))
return_value = True
return return_value
def AddFileToArchive(self, filepath, compress_bit):
"""Add the file at filepath to the current archive.
Args:
filepath: A string, the path of the file to add.
compress_bit: A boolean, whether or not this file should be compressed
when added.
Returns:
True if the file could be added (typically because this is a file) or
False if it could not be added (typically because it is a directory or
a symlink).
"""
curr_archive_path = os.path.join(self.output_dir, self.current_archive)
if os.path.isfile(filepath) and not os.path.islink(filepath):
if os.path.getsize(filepath) > 1048576:
print 'Warning: %s is potentially too large to serve on GAE' % filepath
archive = self.OpenZipFileAtPath(curr_archive_path,
compress=compress_bit)
# Add the file to the archive.
archive.write(filepath, filepath[len(self.base_path):])
archive.close()
return True
else:
return False
def ArchiveIsValid(self):
"""Check whether the archive is valid.
Currently this only checks whether the archive is under the required size.
The thought is that eventually this will do additional validation.
Returns:
True if the archive is valid, False if it is not.
"""
archive_path = os.path.join(self.output_dir, self.current_archive)
return os.path.getsize(archive_path) <= self.max_size
def main(argv):
parser = CreateOptionsParser()
(options, unused_args) = parser.parse_args(args=argv[1:])
VerifyArguments(options, parser)
zipper = DirectoryZipper(options.destination,
options.sourcefiles,
ParseSize(options.filesize),
options.compress)
zipper.StartCompress()
if __name__ == '__main__':
main(sys.argv)

58
scripts/divide_and_compress_constants.py Executable file

@@ -0,0 +1,58 @@
#!/usr/bin/python2.4
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Constants for the divide_and_compress script and DirectoryZipper class."""
__author__ = 'jmatt@google.com (Justin Mattson)'
file_preamble = """#!/usr/bin/env python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an \"AS IS\" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import wsgiref.handlers
from google.appengine.ext import zipserve
from google.appengine.ext import webapp
import memcache_zipserve
class MainHandler(webapp.RequestHandler):
def get(self):
self.response.out.write('Hello world!')
def main():
application = webapp.WSGIApplication([('/(.*)',
memcache_zipserve.create_handler(["""
file_endpiece = """])),
],
debug=False)
wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
main()"""

489
scripts/divide_and_compress_test.py Executable file

@@ -0,0 +1,489 @@
#!/usr/bin/python2.4
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for divide_and_compress.py.
TODO(jmatt): Add tests for module methods.
"""
__author__ = 'jmatt@google.com (Justin Mattson)'
import os
import stat
import unittest
import zipfile
import divide_and_compress
import mox
class BagOfParts(object):
"""Just a generic class that I can use to assign random attributes to."""
def NoOp(self):
x = 1
class ValidAndRemoveTests(unittest.TestCase):
"""Test the ArchiveIsValid and RemoveLastFile methods."""
def setUp(self):
"""Prepare the test.
Construct some mock objects for use with the tests.
"""
self.my_mox = mox.Mox()
file1 = BagOfParts()
file1.filename = 'file1.txt'
file1.contents = 'This is a test file'
file2 = BagOfParts()
file2.filename = 'file2.txt'
file2.contents = ('akdjfk;djsf;kljdslkfjslkdfjlsfjkdvn;kn;2389rtu4i'
'tn;ghf8:89H*hp748FJw80fu9WJFpwf39pujens;fihkhjfk'
'sdjfljkgsc n;iself')
self.files = {'file1': file1, 'file2': file2}
def tearDown(self):
"""Remove any stubs we've created."""
self.my_mox.UnsetStubs()
def testArchiveIsValid(self):
"""Test the DirectoryZipper.ArchiveIsValid method.
Run two tests: one that we expect to pass and one that we expect to fail.
"""
test_file_size = 1056730
self.my_mox.StubOutWithMock(os, 'stat')
os.stat('/foo/0.zip').AndReturn([test_file_size])
self.my_mox.StubOutWithMock(stat, 'ST_SIZE')
stat.ST_SIZE = 0
os.stat('/baz/0.zip').AndReturn([test_file_size])
mox.Replay(os.stat)
test_target = divide_and_compress.DirectoryZipper('/foo/', 'bar',
test_file_size - 1, True)
self.assertEqual(False, test_target.ArchiveIsValid(),
msg=('ERROR: Test failed, ArchiveIsValid should have '
'returned false, but returned true'))
test_target = divide_and_compress.DirectoryZipper('/baz/', 'bar',
test_file_size + 1, True)
self.assertEqual(True, test_target.ArchiveIsValid(),
msg=('ERROR: Test failed, ArchiveIsValid should have'
' returned true, but returned false'))
def testRemoveLastFile(self):
"""Test DirectoryZipper.RemoveLastFile method.
Construct a ZipInfo mock object with two records; verify that write is
only called once on the new ZipFile object.
"""
source = self.CreateZipSource()
dest = self.CreateZipDestination()
source_path = ''.join([os.getcwd(), '/0-old.zip'])
dest_path = ''.join([os.getcwd(), '/0.zip'])
test_target = divide_and_compress.DirectoryZipper(
''.join([os.getcwd(), '/']), 'dummy', 1024*1024, True)
self.my_mox.StubOutWithMock(test_target, 'OpenZipFileAtPath')
test_target.OpenZipFileAtPath(source_path, mode='r').AndReturn(source)
test_target.OpenZipFileAtPath(dest_path,
compress=zipfile.ZIP_DEFLATED,
mode='w').AndReturn(dest)
self.my_mox.StubOutWithMock(os, 'rename')
os.rename(dest_path, source_path)
self.my_mox.StubOutWithMock(os, 'unlink')
os.unlink(source_path)
self.my_mox.ReplayAll()
test_target.RemoveLastFile()
self.my_mox.VerifyAll()
def CreateZipSource(self):
"""Create a mock zip sourec object.
Read should only be called once, because the second file is the one
being removed.
Returns:
A configured mocked
"""
source_zip = self.my_mox.CreateMock(zipfile.ZipFile)
source_zip.infolist().AndReturn([self.files['file1'], self.files['file1']])
source_zip.infolist().AndReturn([self.files['file1'], self.files['file1']])
source_zip.read(self.files['file1'].filename).AndReturn(
self.files['file1'].contents)
source_zip.close()
return source_zip
def CreateZipDestination(self):
"""Create mock destination zip.
Write should only be called once, because there are two files in the
source zip and we expect the second to be removed.
Returns:
A configured mock object.
"""
dest_zip = mox.MockObject(zipfile.ZipFile)
dest_zip.writestr(self.files['file1'].filename,
self.files['file1'].contents)
dest_zip.close()
return dest_zip
class FixArchiveTests(unittest.TestCase):
"""Tests for the DirectoryZipper.FixArchive method."""
def setUp(self):
"""Create a mock file object."""
self.my_mox = mox.Mox()
self.file1 = BagOfParts()
self.file1.filename = 'file1.txt'
self.file1.contents = 'This is a test file'
def tearDown(self):
"""Unset any mocks that we've created."""
self.my_mox.UnsetStubs()
def _InitMultiFileData(self):
"""Create an array of mock file objects.
Create three mock file objects that we can use for testing.
"""
self.multi_file_dir = []
file1 = BagOfParts()
file1.filename = 'file1.txt'
file1.contents = 'kjaskl;jkdjfkja;kjsnbvjnvnbuewklriujalvjsd'
self.multi_file_dir.append(file1)
file2 = BagOfParts()
file2.filename = 'file2.txt'
file2.contents = ('He entered the room and there in the center, it was.'
' Looking upon the thing, suddenly he could not remember'
' whether he had actually seen it before or whether'
' his memory of it was merely the effect of something'
' so often being imagined that it had long since become '
' manifest in his mind.')
self.multi_file_dir.append(file2)
file3 = BagOfParts()
file3.filename = 'file3.txt'
file3.contents = 'Whoa, what is \'file2.txt\' all about?'
self.multi_file_dir.append(file3)
def testSingleFileArchive(self):
"""Test behavior of FixArchive when the archive has a single member.
We expect that, when called with an archive that has a single member,
this method will return False and unlink the archive.
"""
test_target = divide_and_compress.DirectoryZipper(
''.join([os.getcwd(), '/']), 'dummy', 1024*1024, True)
self.my_mox.StubOutWithMock(test_target, 'OpenZipFileAtPath')
test_target.OpenZipFileAtPath(
''.join([os.getcwd(), '/0.zip']), mode='r').AndReturn(
self.CreateSingleFileMock())
self.my_mox.StubOutWithMock(os, 'unlink')
os.unlink(''.join([os.getcwd(), '/0.zip']))
self.my_mox.ReplayAll()
self.assertEqual(False, test_target.FixArchive('SIZE'))
self.my_mox.VerifyAll()
def CreateSingleFileMock(self):
"""Create a mock ZipFile object for testSingleFileArchive.
We just need it to return a single-member infolist twice.
Returns:
A configured mock object
"""
mock_zip = self.my_mox.CreateMock(zipfile.ZipFile)
mock_zip.infolist().AndReturn([self.file1])
mock_zip.infolist().AndReturn([self.file1])
mock_zip.close()
return mock_zip
def testMultiFileArchive(self):
"""Test behavior of DirectoryZipper.FixArchive with a multi-file archive.
We expect that FixArchive will rename the old archive, adding '-old' before
'.zip', read all the members of '-old' except the last one into a new
archive with the same name as the original, and then unlink the '-old' copy.
"""
test_target = divide_and_compress.DirectoryZipper(
''.join([os.getcwd(), '/']), 'dummy', 1024*1024, True)
self.my_mox.StubOutWithMock(test_target, 'OpenZipFileAtPath')
test_target.OpenZipFileAtPath(
''.join([os.getcwd(), '/0.zip']), mode='r').AndReturn(
self.CreateMultiFileMock())
self.my_mox.StubOutWithMock(test_target, 'RemoveLastFile')
test_target.RemoveLastFile(''.join([os.getcwd(), '/0.zip']))
self.my_mox.StubOutWithMock(os, 'stat')
os.stat(''.join([os.getcwd(), '/0.zip'])).AndReturn([49302])
self.my_mox.StubOutWithMock(stat, 'ST_SIZE')
stat.ST_SIZE = 0
self.my_mox.ReplayAll()
self.assertEqual(True, test_target.FixArchive('SIZE'))
self.my_mox.VerifyAll()
def CreateMultiFileMock(self):
"""Create mock ZipFile object for use with testMultiFileArchive.
The mock just needs to return the infolist mock that is prepared in
_InitMultiFileData().
Returns:
A configured mock object
"""
self._InitMultiFileData()
mock_zip = self.my_mox.CreateMock(zipfile.ZipFile)
mock_zip.infolist().AndReturn(self.multi_file_dir)
mock_zip.close()
return mock_zip
class AddFileToArchiveTest(unittest.TestCase):
"""Test behavior of method to add a file to an archive."""
def setUp(self):
"""Setup the arguments for the DirectoryZipper object."""
self.my_mox = mox.Mox()
self.output_dir = '%s/' % os.getcwd()
self.file_to_add = 'file.txt'
self.input_dir = '/foo/bar/baz/'
def tearDown(self):
self.my_mox.UnsetStubs()
def testAddFileToArchive(self):
"""Test the DirectoryZipper.AddFileToArchive method.
We are testing a pretty trivial method; we just expect it to look at the
file it is adding, so that it can print a warning if the file is too large.
"""
test_target = divide_and_compress.DirectoryZipper(self.output_dir,
self.input_dir,
1024*1024, True)
self.my_mox.StubOutWithMock(test_target, 'OpenZipFileAtPath')
archive_mock = self.CreateArchiveMock()
test_target.OpenZipFileAtPath(
''.join([self.output_dir, '0.zip']),
compress=zipfile.ZIP_DEFLATED).AndReturn(archive_mock)
self.StubOutOsModule()
self.my_mox.ReplayAll()
test_target.AddFileToArchive(''.join([self.input_dir, self.file_to_add]),
zipfile.ZIP_DEFLATED)
self.my_mox.VerifyAll()
def StubOutOsModule(self):
"""Create a mock for the os.path and os.stat objects.
Create a stub that will return the type (file or directory) and size of the
object that is to be added.
"""
self.my_mox.StubOutWithMock(os.path, 'isfile')
os.path.isfile(''.join([self.input_dir, self.file_to_add])).AndReturn(True)
self.my_mox.StubOutWithMock(os, 'stat')
os.stat(''.join([self.input_dir, self.file_to_add])).AndReturn([39480])
self.my_mox.StubOutWithMock(stat, 'ST_SIZE')
stat.ST_SIZE = 0
def CreateArchiveMock(self):
"""Create a mock ZipFile for use with testAddFileToArchive.
Just verify that write is called with the file we expect and that the
archive is closed after the file addition.
Returns:
A configured mock object
"""
archive_mock = self.my_mox.CreateMock(zipfile.ZipFile)
archive_mock.write(''.join([self.input_dir, self.file_to_add]),
self.file_to_add)
archive_mock.close()
return archive_mock
class CompressDirectoryTest(unittest.TestCase):
"""Test the master method of the class.
Testing with the following directory structure.
/dir1/
/dir1/file1.txt
/dir1/file2.txt
/dir1/dir2/
/dir1/dir2/dir3/
/dir1/dir2/dir4/
/dir1/dir2/dir4/file3.txt
/dir1/dir5/
/dir1/dir5/file4.txt
/dir1/dir5/file5.txt
/dir1/dir5/file6.txt
/dir1/dir5/file7.txt
/dir1/dir6/
/dir1/dir6/file8.txt
file1.txt, file2.txt, file3.txt should be in 0.zip
file4.txt should be in 1.zip
file5.txt, file6.txt should be in 2.zip
file7.txt will not be stored since it will be too large compressed
file8.txt should be in 3.zip
"""
def setUp(self):
"""Setup all the mocks for this test."""
self.my_mox = mox.Mox()
self.base_dir = '/dir1'
self.output_path = '/out_dir/'
self.test_target = divide_and_compress.DirectoryZipper(
self.output_path, self.base_dir, 1024*1024, True)
self.InitArgLists()
self.InitOsDotPath()
self.InitArchiveIsValid()
self.InitWriteIndexRecord()
self.InitAddFileToArchive()
def tearDown(self):
self.my_mox.UnsetStubs()
def testCompressDirectory(self):
"""Test the DirectoryZipper.CompressDirectory method."""
self.my_mox.ReplayAll()
for arguments in self.argument_lists:
self.test_target.CompressDirectory(None, arguments[0], arguments[1])
self.my_mox.VerifyAll()
def InitAddFileToArchive(self):
"""Setup mock for DirectoryZipper.AddFileToArchive.
Make sure that the files are added in the order we expect.
"""
self.my_mox.StubOutWithMock(self.test_target, 'AddFileToArchive')
self.test_target.AddFileToArchive('/dir1/file1.txt', zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/file2.txt', zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir2/dir4/file3.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir5/file4.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir5/file4.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir5/file5.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir5/file5.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir5/file6.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir5/file7.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir5/file7.txt',
zipfile.ZIP_DEFLATED)
self.test_target.AddFileToArchive('/dir1/dir6/file8.txt',
zipfile.ZIP_DEFLATED)
def InitWriteIndexRecord(self):
"""Setup mock for DirectoryZipper.WriteIndexRecord."""
self.my_mox.StubOutWithMock(self.test_target, 'WriteIndexRecord')
# we are trying to compress 8 files, but we should only attempt to
# write an index record 7 times, because one file is too large to be stored
self.test_target.WriteIndexRecord().AndReturn(True)
self.test_target.WriteIndexRecord().AndReturn(False)
self.test_target.WriteIndexRecord().AndReturn(False)
self.test_target.WriteIndexRecord().AndReturn(True)
self.test_target.WriteIndexRecord().AndReturn(True)
self.test_target.WriteIndexRecord().AndReturn(False)
self.test_target.WriteIndexRecord().AndReturn(True)
def InitArchiveIsValid(self):
"""Mock out DirectoryZipper.ArchiveIsValid and DirectoryZipper.FixArchive.
Mock these methods out such that file1, file2, and file3 go into one
archive. file4 then goes into the next archive, file5 and file6 in the
next, file7 should appear too large to compress into an archive, and
file8 goes into the final archive.
"""
self.my_mox.StubOutWithMock(self.test_target, 'ArchiveIsValid')
self.my_mox.StubOutWithMock(self.test_target, 'FixArchive')
self.test_target.ArchiveIsValid().AndReturn(True)
self.test_target.ArchiveIsValid().AndReturn(True)
self.test_target.ArchiveIsValid().AndReturn(True)
# should be file4.txt
self.test_target.ArchiveIsValid().AndReturn(False)
self.test_target.FixArchive('SIZE').AndReturn(True)
self.test_target.ArchiveIsValid().AndReturn(True)
# should be file5.txt
self.test_target.ArchiveIsValid().AndReturn(False)
self.test_target.FixArchive('SIZE').AndReturn(True)
self.test_target.ArchiveIsValid().AndReturn(True)
self.test_target.ArchiveIsValid().AndReturn(True)
# should be file7.txt
self.test_target.ArchiveIsValid().AndReturn(False)
self.test_target.FixArchive('SIZE').AndReturn(True)
self.test_target.ArchiveIsValid().AndReturn(False)
self.test_target.FixArchive('SIZE').AndReturn(False)
self.test_target.ArchiveIsValid().AndReturn(True)
def InitOsDotPath(self):
"""Mock out os.path.isfile.
Mock this out so the things we want to appear as files appear as files and
the things we want to appear as directories appear as directories. Also
make sure that the order of file visits is as we expect (which is why
InAnyOrder isn't used here).
"""
self.my_mox.StubOutWithMock(os.path, 'isfile')
os.path.isfile('/dir1/dir2').AndReturn(False)
os.path.isfile('/dir1/dir5').AndReturn(False)
os.path.isfile('/dir1/dir6').AndReturn(False)
os.path.isfile('/dir1/file1.txt').AndReturn(True)
os.path.isfile('/dir1/file2.txt').AndReturn(True)
os.path.isfile('/dir1/dir2/dir3').AndReturn(False)
os.path.isfile('/dir1/dir2/dir4').AndReturn(False)
os.path.isfile('/dir1/dir2/dir4/file3.txt').AndReturn(True)
os.path.isfile('/dir1/dir5/file4.txt').AndReturn(True)
os.path.isfile('/dir1/dir5/file4.txt').AndReturn(True)
os.path.isfile('/dir1/dir5/file5.txt').AndReturn(True)
os.path.isfile('/dir1/dir5/file5.txt').AndReturn(True)
os.path.isfile('/dir1/dir5/file6.txt').AndReturn(True)
os.path.isfile('/dir1/dir5/file7.txt').AndReturn(True)
os.path.isfile('/dir1/dir5/file7.txt').AndReturn(True)
os.path.isfile('/dir1/dir6/file8.txt').AndReturn(True)
def InitArgLists(self):
"""Create the directory path => directory contents mappings."""
self.argument_lists = []
self.argument_lists.append(['/dir1',
['file1.txt', 'file2.txt', 'dir2', 'dir5',
'dir6']])
self.argument_lists.append(['/dir1/dir2', ['dir3', 'dir4']])
self.argument_lists.append(['/dir1/dir2/dir3', []])
self.argument_lists.append(['/dir1/dir2/dir4', ['file3.txt']])
self.argument_lists.append(['/dir1/dir5',
['file4.txt', 'file5.txt', 'file6.txt',
'file7.txt']])
self.argument_lists.append(['/dir1/dir6', ['file8.txt']])
if __name__ == '__main__':
unittest.main()