Initial Cookiecutter Commit.

Change-Id: I58c1914ca033c2c40ba2b63a3c07e0a9a8397ba4

parent c0809470d1
commit 403102e8e9
.coveragerc  (Normal file, 7 lines added)
@@ -0,0 +1,7 @@
[run]
branch = True
source = storlets
omit = storlets/openstack/*

[report]
ignore-errors = True
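The [run] section tracks branch coverage, limits measurement to the storlets package and skips the generated storlets/openstack/* code; [report] keeps reporting errors non-fatal. A minimal sketch of driving the same configuration from Python, assuming the coverage package is installed and the tests live under a tests/ directory (both are assumptions; the repository's own runner is the testr configuration shown below):

    # Sketch only: run the unit tests under coverage using .coveragerc.
    import coverage
    import unittest

    cov = coverage.Coverage(config_file='.coveragerc')  # picks up branch/source/omit
    cov.start()
    suite = unittest.TestLoader().discover('tests')      # 'tests' dir is an assumption
    unittest.TextTestRunner().run(suite)
    cov.stop()
    cov.save()
    cov.report()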
.gitignore  (vendored, Normal file, 54 lines added)
@@ -0,0 +1,54 @@
*.py[cod]

# C extensions
*.so

# Packages
*.egg
*.egg-info
dist
build
.eggs
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
.tox
nosetests.xml
.testrepository
.venv

# Translations
*.mo

# Mr Developer
.mr.developer.cfg
.project
.pydevproject

# Complexity
output/*.html
output/*/index.html

# Sphinx
doc/build

# pbr generates these
AUTHORS
ChangeLog

# Editors
*~
.*.swp
.*sw?
.gitreview  (Normal file, 4 lines added)
@@ -0,0 +1,4 @@
[gerrit]
host=review.openstack.org
port=29418
project=openstack/storlets.git
.mailmap  (Normal file, 3 lines added)
@@ -0,0 +1,3 @@
# Format is:
# <preferred e-mail> <other e-mail 1>
# <preferred e-mail> <other e-mail 2>
.testr.conf  (Normal file, 7 lines added)
@@ -0,0 +1,7 @@
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
             OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
             ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
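test_command sets the OS_* capture and timeout variables and then runs python -m subunit.run discover over the tree, which is the standard testrepository plumbing. Roughly the same discovery step, expressed with the plain standard-library runner for illustration (no subunit stream and no environment handling, which is an intentional simplification):

    # Illustration of the discovery performed by test_command.
    import unittest

    suite = unittest.TestLoader().discover(start_dir='.', top_level_dir='./')
    unittest.TextTestRunner(verbosity=2).run(suite)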
CONTRIBUTING.rst  (Normal file, 17 lines added)
@@ -0,0 +1,17 @@
If you would like to contribute to the development of OpenStack, you must
follow the steps in this page:

   http://docs.openstack.org/infra/manual/developers.html

If you already have a good understanding of how the system works and your
OpenStack accounts are set up, you can skip to the development workflow
section of this documentation to learn how changes to OpenStack should be
submitted for review via the Gerrit tool:

   http://docs.openstack.org/infra/manual/developers.html#development-workflow

Pull requests submitted through GitHub will be ignored.

Bugs should be filed on Launchpad, not GitHub:

   https://bugs.launchpad.net/storlets
@@ -1,6 +1,6 @@
 ibm_container_install_dir: opt/ibm
 lxc_device: /home/docker_device
-storlet_source_dir: ~/swift-storlets
+storlet_source_dir: ~/storlets
 python_dist_packages_dir: usr/local/lib/python2.7/dist-packages
 storlet_gateway_conf_file: /etc/swift/storlet_docker_gateway.conf

@@ -19,50 +19,51 @@ Limitations under the License.
 @author: cdoron
 '''
 
-import sys
-import subprocess
 import json
+import subprocess
+import sys
 
+
 def extractId(tar_file_name, repository, tag):
     subprocess.call(['tar', 'xf', tar_file_name, 'repositories'])
     repository_file = open('repositories')
     j = json.loads(repository_file.read())
 
-    if not repository in j:
-        print "Not Found"
+    if repository not in j:
+        print("Not Found")
     else:
         pairs = j[repository]
         if tag:
             if tag not in pairs:
-                print "Not Found"
+                print("Not Found")
             else:
-                print pairs[tag]
+                print(pairs[tag])
         else:
             if len(pairs) != 1:
-                print "No tag supplied. Ambiguous"
+                print("No tag supplied. Ambiguous")
             else:
-                print pairs.values()[0]
+                print(pairs.values()[0])
 
     repository_file.close()
     subprocess.call(['rm', '-f', 'repositories'])
 
 
 def usage(argv):
-    print argv[0] + " <tar_file> <repository> [tag]"
+    print(argv[0] + " <tar_file> <repository> [tag]")
 
 
 def main(argv):
     if len(argv) < 3 or len(argv) > 4:
         usage(argv)
         return
 
     tar_file_name = argv[1]
     repository = argv[2]
     tag = None
     if len(argv) >= 4:
         tag = argv[3]
 
     extractId(tar_file_name, repository, tag)
 
 
 if __name__ == "__main__":
     main(sys.argv)
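extractId works against the `repositories` file that `docker save` writes into an image tarball: a JSON object mapping repository names to {tag: image_id} pairs. A small sketch of the lookup it performs, using a made-up repository name and image id purely for illustration:

    # Hypothetical content of the 'repositories' member of a saved image tarball.
    import json

    j = json.loads('{"storlets/ubuntu_jre8": {"latest": "2d696ee91c4b"}}')
    repository, tag = "storlets/ubuntu_jre8", "latest"
    if repository not in j:
        print("Not Found")
    elif tag not in j[repository]:
        print("Not Found")
    else:
        print(j[repository][tag])  # -> 2d696ee91c4b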
@@ -21,5 +21,5 @@
   command: docker pull {{ hostvars[groups['docker'][0]]['inventory_hostname'] }}:{{ docker_registry_port }}/{{ tenant_id.stdout_lines[0] }}
 
 - name: shutdown_container
-  shell: "{{ lxc_device }}/scripts/send_halt_cmd_to_daemon_factory.py
+  shell: "/usr/bin/python {{ lxc_device }}/scripts/send_halt_cmd_to_daemon_factory.py
          {{ lxc_device }}/pipes/scopes/AUTH_{{ tenant_id.stdout_lines[0] }}/factory_pipe"
@ -13,21 +13,22 @@ See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
'''
|
||||
@author: cdoron
|
||||
'''
|
||||
import ConfigParser
|
||||
import fileinput
|
||||
import os
|
||||
import sys
|
||||
import pwd
|
||||
import shutil
|
||||
import fileinput
|
||||
import ConfigParser
|
||||
import sys
|
||||
|
||||
|
||||
def _chown_to_swift(path):
|
||||
uc = pwd.getpwnam('swift')
|
||||
os.chown(path, uc.pw_uid, uc.pw_gid)
|
||||
|
||||
|
||||
def _unpatch_pipeline_line(orig_line, storlet_middleware):
|
||||
mds = list()
|
||||
for md in orig_line.split():
|
||||
@ -37,13 +38,14 @@ def _unpatch_pipeline_line(orig_line, storlet_middleware):
|
||||
|
||||
if storlet_middleware in mds:
|
||||
mds.remove(storlet_middleware)
|
||||
|
||||
|
||||
new_line = 'pipeline ='
|
||||
for md in mds:
|
||||
new_line += ' ' + md
|
||||
|
||||
return new_line + '\n'
|
||||
|
||||
|
||||
|
||||
def _patch_proxy_pipeline_line(orig_line, storlet_middleware):
|
||||
mds = list()
|
||||
for md in orig_line.split():
|
||||
@ -53,25 +55,26 @@ def _patch_proxy_pipeline_line(orig_line, storlet_middleware):
|
||||
|
||||
if storlet_middleware in mds:
|
||||
return orig_line
|
||||
|
||||
|
||||
try:
|
||||
slo_index = mds.index('slo')
|
||||
except Exception:
|
||||
slo_index = -1
|
||||
|
||||
|
||||
if slo_index != -1:
|
||||
mds.insert(slo_index, storlet_middleware)
|
||||
else:
|
||||
proxy_index = mds.index('proxy-server')
|
||||
mds.insert(proxy_index, storlet_middleware)
|
||||
|
||||
|
||||
new_line = 'pipeline ='
|
||||
for md in mds:
|
||||
new_line += ' ' + md
|
||||
|
||||
return new_line + '\n'
|
||||
|
||||
def _patch_object_pipeline_line(orig_line,storlet_middleware):
|
||||
|
||||
def _patch_object_pipeline_line(orig_line, storlet_middleware):
|
||||
mds = list()
|
||||
for md in orig_line.split():
|
||||
if md == 'pipeline' or md == '=':
|
||||
@ -80,125 +83,144 @@ def _patch_object_pipeline_line(orig_line,storlet_middleware):
|
||||
|
||||
if storlet_middleware in mds:
|
||||
return orig_line
|
||||
|
||||
|
||||
object_index = mds.index('object-server')
|
||||
mds.insert(object_index, storlet_middleware)
|
||||
|
||||
|
||||
new_line = 'pipeline ='
|
||||
for md in mds:
|
||||
new_line += ' ' + md
|
||||
|
||||
return new_line + '\n'
|
||||
|
||||
def unpatch_swift_config_file(conf, conf_file):
|
||||
storlet_middleware = conf.get('common-confs','storlet_middleware')
|
||||
filter_block_first_line = '[filter:%s]\n' % storlet_middleware
|
||||
|
||||
for line in fileinput.input(conf_file, inplace = 1):
|
||||
def unpatch_swift_config_file(conf, conf_file):
|
||||
storlet_middleware = conf.get('common-confs', 'storlet_middleware')
|
||||
|
||||
for line in fileinput.input(conf_file, inplace=1):
|
||||
if line.startswith('pipeline'):
|
||||
new_line = _unpatch_pipeline_line(line, storlet_middleware)
|
||||
line = new_line
|
||||
print line,
|
||||
sys.stdout.write(line)
|
||||
|
||||
_chown_to_swift(conf_file)
|
||||
|
||||
|
||||
|
||||
def patch_swift_config_file(conf, conf_file, service):
|
||||
storlet_middleware = conf.get('common-confs','storlet_middleware')
|
||||
storlet_gateway_implementation_class = conf.get('common-confs','storlet_gateway_module')
|
||||
storlet_middleware = conf.get('common-confs', 'storlet_middleware')
|
||||
filter_block_first_line = '[filter:%s]\n' % storlet_middleware
|
||||
|
||||
|
||||
filter_in_file = False
|
||||
for line in fileinput.input(conf_file, inplace = 1):
|
||||
for line in fileinput.input(conf_file, inplace=1):
|
||||
if line.startswith('pipeline'):
|
||||
if service == 'proxy':
|
||||
new_line = _patch_proxy_pipeline_line(line,storlet_middleware)
|
||||
new_line = _patch_proxy_pipeline_line(line, storlet_middleware)
|
||||
else:
|
||||
new_line = _patch_object_pipeline_line(line,storlet_middleware)
|
||||
new_line = _patch_object_pipeline_line(line,
|
||||
storlet_middleware)
|
||||
line = new_line
|
||||
if filter_block_first_line in line:
|
||||
filter_in_file = True
|
||||
print line,
|
||||
|
||||
if filter_in_file == False:
|
||||
sys.stdout.write(line)
|
||||
|
||||
if filter_in_file is False:
|
||||
with open(conf_file, 'a') as f:
|
||||
f.write('\n')
|
||||
f.write(filter_block_first_line)
|
||||
f.write('use = egg:storlets#%s\n' % storlet_middleware)
|
||||
f.write('storlet_container = %s\n' % conf.get('common-confs','storlet_container'))
|
||||
f.write('storlet_dependency = %s\n' % conf.get('common-confs','storlet_dependency'))
|
||||
f.write('storlet_timeout = %s\n' % conf.get('common-confs','storlet_timeout'))
|
||||
f.write('storlet_gateway_module = %s\n' % conf.get('common-confs','storlet_gateway_module'))
|
||||
f.write('storlet_gateway_conf = %s\n' % conf.get('common-confs','storlet_gateway_conf'))
|
||||
f.write('storlet_execute_on_proxy_only = %s\n' % conf.get('common-confs','storlet_proxy_execution'))
|
||||
f.write('storlet_container = %s\n' %
|
||||
conf.get('common-confs', 'storlet_container'))
|
||||
f.write('storlet_dependency = %s\n' %
|
||||
conf.get('common-confs', 'storlet_dependency'))
|
||||
f.write('storlet_timeout = %s\n' %
|
||||
conf.get('common-confs', 'storlet_timeout'))
|
||||
f.write('storlet_gateway_module = %s\n' %
|
||||
conf.get('common-confs', 'storlet_gateway_module'))
|
||||
f.write('storlet_gateway_conf = %s\n' %
|
||||
conf.get('common-confs', 'storlet_gateway_conf'))
|
||||
f.write('storlet_execute_on_proxy_only = %s\n' % conf.get(
|
||||
'common-confs', 'storlet_proxy_execution'))
|
||||
f.write('execution_server = %s\n' % service)
|
||||
|
||||
|
||||
_chown_to_swift(conf_file)
|
||||
|
||||
|
||||
def unpatch_swift_storlet_proxy_file(conf):
|
||||
storlet_proxy_server_conf_file = conf.get('proxy-confs','storlet_proxy_server_conf_file')
|
||||
storlet_proxy_server_conf_file = conf.get('proxy-confs',
|
||||
'storlet_proxy_server_conf_file')
|
||||
if os.path.exists(storlet_proxy_server_conf_file):
|
||||
os.remove(storlet_proxy_server_conf_file)
|
||||
|
||||
|
||||
|
||||
def patch_swift_storlet_proxy_file(conf):
|
||||
storlet_proxy_server_conf_file = conf.get('proxy-confs','storlet_proxy_server_conf_file')
|
||||
storlet_proxy_server_conf_file = conf.get('proxy-confs',
|
||||
'storlet_proxy_server_conf_file')
|
||||
proxy_server_conf_file = conf.get('proxy-confs', 'proxy_server_conf_file')
|
||||
|
||||
|
||||
source_file = proxy_server_conf_file
|
||||
target_file = storlet_proxy_server_conf_file
|
||||
shutil.copyfile(source_file, target_file)
|
||||
|
||||
for line in fileinput.input(storlet_proxy_server_conf_file, inplace = 1):
|
||||
if line.startswith('pipeline'):
|
||||
line= 'pipeline = proxy-logging cache storlet_handler slo proxy-logging proxy-server\n'
|
||||
print line,
|
||||
|
||||
for line in fileinput.input(storlet_proxy_server_conf_file, inplace=1):
|
||||
if line.startswith('pipeline'):
|
||||
line = 'pipeline = proxy-logging cache storlet_handler slo ' + \
|
||||
'proxy-logging proxy-server\n'
|
||||
sys.stdout.write(line)
|
||||
|
||||
_chown_to_swift(storlet_proxy_server_conf_file)
|
||||
|
||||
|
||||
|
||||
def remove_gateway_conf_file(conf):
|
||||
gateway_conf_file = conf.get('common-confs', 'storlet_gateway_conf')
|
||||
if os.path.exists(gateway_conf_file):
|
||||
os.remove(gateway_conf_file)
|
||||
|
||||
def remove(conf):
|
||||
object_server_conf_files = conf.get('object-confs', 'object_server_conf_files').split(',')
|
||||
|
||||
def remove(conf):
|
||||
object_server_conf_files = conf.get('object-confs',
|
||||
'object_server_conf_files').split(',')
|
||||
for f in object_server_conf_files:
|
||||
if os.path.exists(f):
|
||||
unpatch_swift_config_file(conf, f)
|
||||
|
||||
proxy_server_conf_file = conf.get('proxy-confs','proxy_server_conf_file')
|
||||
proxy_server_conf_file = conf.get('proxy-confs', 'proxy_server_conf_file')
|
||||
unpatch_swift_config_file(conf, proxy_server_conf_file)
|
||||
|
||||
|
||||
unpatch_swift_storlet_proxy_file(conf)
|
||||
remove_gateway_conf_file(conf)
|
||||
|
||||
|
||||
def install(conf):
|
||||
object_server_conf_files = conf.get('object-confs', 'object_server_conf_files').split(',')
|
||||
object_server_conf_files = conf.get('object-confs',
|
||||
'object_server_conf_files').split(',')
|
||||
for f in object_server_conf_files:
|
||||
if os.path.exists(f):
|
||||
if os.path.exists(f):
|
||||
patch_swift_config_file(conf, f, 'object')
|
||||
|
||||
proxy_server_conf_file = conf.get('proxy-confs','proxy_server_conf_file')
|
||||
|
||||
proxy_server_conf_file = conf.get('proxy-confs', 'proxy_server_conf_file')
|
||||
patch_swift_config_file(conf, proxy_server_conf_file, 'proxy')
|
||||
|
||||
patch_swift_storlet_proxy_file(conf)
|
||||
|
||||
|
||||
|
||||
def usage(argv):
|
||||
print "Usage: " + argv[0] + " install/remove conf_file"
|
||||
print("Usage: " + argv[0] + " install/remove conf_file")
|
||||
|
||||
|
||||
def main(argv):
|
||||
if len(argv) != 3:
|
||||
usage(argv)
|
||||
exit(-1)
|
||||
|
||||
|
||||
conf = ConfigParser.ConfigParser()
|
||||
conf.read(argv[2])
|
||||
|
||||
|
||||
if argv[1] == 'install':
|
||||
install(conf)
|
||||
elif argv[1] == 'remove':
|
||||
remove(conf)
|
||||
else:
|
||||
usage(argv)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(sys.argv)
|
||||
|
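The helpers above operate on the raw `pipeline =` line of a Swift config: _patch_proxy_pipeline_line inserts the storlet middleware just before slo (falling back to just before proxy-server when there is no slo filter), _patch_object_pipeline_line inserts it before object-server, and _unpatch_pipeline_line removes it again. A rough before/after sketch, assuming the middleware is named storlet_handler as in the storlet proxy pipeline written by patch_swift_storlet_proxy_file; the pipeline contents themselves are illustrative:

    # Illustrative pipeline lines; the real ones come from proxy-server.conf.
    before = 'pipeline = catch_errors proxy-logging cache slo proxy-server\n'
    after = _patch_proxy_pipeline_line(before, 'storlet_handler')
    # after == 'pipeline = catch_errors proxy-logging cache storlet_handler slo proxy-server\n'
    restored = _unpatch_pipeline_line(after, 'storlet_handler')
    # restored == before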
@ -29,16 +29,17 @@ from ctypes import POINTER
|
||||
|
||||
|
||||
class SBus(object):
|
||||
'''
|
||||
@summary: This class wraps low level C-API for SBus functionality
|
||||
'''@summary: This class wraps low level C-API for SBus functionality
|
||||
|
||||
to be used with Python
|
||||
'''
|
||||
SBUS_SO_NAME = '/usr/local/lib/python2.7/dist-packages/sbus.so'
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def __init__(self):
|
||||
'''
|
||||
@summary: CTOR
|
||||
'''@summary: CTOR
|
||||
|
||||
Setup argument types mappings.
|
||||
'''
|
||||
|
||||
@ -46,12 +47,12 @@ class SBus(object):
|
||||
self.sbus_back_ = ctypes.CDLL(SBus.SBUS_SO_NAME)
|
||||
|
||||
# create SBus
|
||||
self.sbus_back_.sbus_create.argtypes = [c_char_p]
|
||||
self.sbus_back_.sbus_create.restype = c_int
|
||||
self.sbus_back_.sbus_create.argtypes = [c_char_p]
|
||||
self.sbus_back_.sbus_create.restype = c_int
|
||||
|
||||
# listen to SBus
|
||||
self.sbus_back_.sbus_listen.argtypes = [c_int]
|
||||
self.sbus_back_.sbus_listen.restype = c_int
|
||||
self.sbus_back_.sbus_listen.argtypes = [c_int]
|
||||
self.sbus_back_.sbus_listen.restype = c_int
|
||||
|
||||
# send message
|
||||
self.sbus_back_.sbus_send_msg.argtypes = [c_char_p,
|
||||
@ -61,7 +62,7 @@ class SBus(object):
|
||||
c_int,
|
||||
c_char_p,
|
||||
c_int]
|
||||
self.sbus_back_.sbus_send_msg.restype = c_int
|
||||
self.sbus_back_.sbus_send_msg.restype = c_int
|
||||
|
||||
# receive message
|
||||
self.sbus_back_.sbus_recv_msg.argtypes = [c_int,
|
||||
@ -71,13 +72,13 @@ class SBus(object):
|
||||
POINTER(c_int),
|
||||
POINTER(c_char_p),
|
||||
POINTER(c_int)]
|
||||
self.sbus_back_.sbus_recv_msg.restype = c_int
|
||||
self.sbus_back_.sbus_recv_msg.restype = c_int
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
@staticmethod
|
||||
def start_logger(str_log_level='DEBUG', container_id=None):
|
||||
'''
|
||||
@summary: Start logger.
|
||||
'''@summary: Start logger.
|
||||
|
||||
@param str_log_level: The level of verbosity in log records.
|
||||
Default value - 'DEBUG'.
|
||||
@ -92,10 +93,10 @@ class SBus(object):
|
||||
sbus_back_.sbus_start_logger(str_log_level, container_id)
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
@staticmethod
|
||||
def stop_logger():
|
||||
'''
|
||||
@summary: Stop logger.
|
||||
'''@summary: Stop logger.
|
||||
|
||||
@rtype: void
|
||||
'''
|
||||
@ -104,9 +105,9 @@ class SBus(object):
|
||||
sbus_back_.sbus_stop_logger()
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def create(self, sbus_name):
|
||||
'''
|
||||
@summary: Instantiate an SBus. A wrapper for C function.
|
||||
'''@summary: Instantiate an SBus. A wrapper for C function.
|
||||
|
||||
@param sbus_name: Path to domain socket "file".
|
||||
@type sbus_name: string
|
||||
@ -117,9 +118,10 @@ class SBus(object):
|
||||
return self.sbus_back_.sbus_create(sbus_name)
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def listen(self, sbus_handler):
|
||||
'''
|
||||
@summary: Listen to the SBus.
|
||||
'''@summary: Listen to the SBus.
|
||||
|
||||
Suspend the executing thread.
|
||||
|
||||
@param sbus_handler: Handler to SBus to listen.
|
||||
@ -131,9 +133,10 @@ class SBus(object):
|
||||
return self.sbus_back_.sbus_listen(sbus_handler)
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def receive(self, sbus_handler):
|
||||
'''
|
||||
@summary: Read the data from SBus.
|
||||
'''@summary: Read the data from SBus.
|
||||
|
||||
Create a datagram.
|
||||
|
||||
@param sbus_handler: Handler to SBus to read data from.
|
||||
@ -142,12 +145,12 @@ class SBus(object):
|
||||
@return: An object with the obtained data. Null-able.
|
||||
@rtype: SBusDatagram
|
||||
'''
|
||||
ph_files = POINTER(c_int)()
|
||||
ph_files = POINTER(c_int)()
|
||||
pp_metadata = (c_char_p)()
|
||||
pp_params = (c_char_p)()
|
||||
pn_files = (c_int)()
|
||||
pp_params = (c_char_p)()
|
||||
pn_files = (c_int)()
|
||||
pn_metadata = (c_int)()
|
||||
pn_params = (c_int)()
|
||||
pn_params = (c_int)()
|
||||
|
||||
# Invoke C function
|
||||
n_status = self.sbus_back_.sbus_recv_msg(sbus_handler,
|
||||
@ -157,7 +160,7 @@ class SBus(object):
|
||||
pn_metadata,
|
||||
pp_params,
|
||||
pn_params)
|
||||
result_dtg = None
|
||||
result_dtg = None
|
||||
if 0 <= n_status:
|
||||
# The invocation was successful.
|
||||
# De-serialize the data
|
||||
@ -169,10 +172,10 @@ class SBus(object):
|
||||
h_files.append(ph_files[i])
|
||||
|
||||
# Extract Python strings
|
||||
n_metadata = pn_metadata.value
|
||||
n_metadata = pn_metadata.value
|
||||
str_metadata = pp_metadata.value
|
||||
n_params = pn_params.value
|
||||
str_params = pp_params.value
|
||||
n_params = pn_params.value
|
||||
str_params = pp_params.value
|
||||
|
||||
# Trim the junk out
|
||||
if 0 < n_metadata:
|
||||
@ -187,10 +190,11 @@ class SBus(object):
|
||||
return result_dtg
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
@staticmethod
|
||||
def send(sbus_name, datagram):
|
||||
'''
|
||||
@summary: Send the datagram through SBus.
|
||||
'''@summary: Send the datagram through SBus.
|
||||
|
||||
Serialize dictionaries into JSON strings.
|
||||
|
||||
@param sbus_name: Path to domain socket "file".
|
||||
@ -204,30 +208,30 @@ class SBus(object):
|
||||
|
||||
# Serialize the datagram into JSON strings and C integer array
|
||||
str_json_params = datagram.get_params_and_cmd_as_json()
|
||||
p_params = c_char_p(str_json_params)
|
||||
n_params = c_int(len(str_json_params))
|
||||
p_params = c_char_p(str_json_params)
|
||||
n_params = c_int(len(str_json_params))
|
||||
|
||||
n_files = c_int(0)
|
||||
h_files = None
|
||||
n_metadata = c_int(0)
|
||||
p_metadata = None
|
||||
n_files = c_int(0)
|
||||
h_files = None
|
||||
n_metadata = c_int(0)
|
||||
p_metadata = None
|
||||
|
||||
if datagram.get_num_files() > 0:
|
||||
str_json_metadata = datagram.get_files_metadata_as_json()
|
||||
p_metadata = c_char_p(str_json_metadata)
|
||||
n_metadata = c_int(len(str_json_metadata))
|
||||
p_metadata = c_char_p(str_json_metadata)
|
||||
n_metadata = c_int(len(str_json_metadata))
|
||||
|
||||
n_fds = datagram.get_num_files()
|
||||
n_files = c_int(n_fds)
|
||||
n_fds = datagram.get_num_files()
|
||||
n_files = c_int(n_fds)
|
||||
|
||||
file_fds = datagram.get_files()
|
||||
h_files = (c_int * n_fds)()
|
||||
file_fds = datagram.get_files()
|
||||
h_files = (c_int * n_fds)()
|
||||
|
||||
for i in range(n_fds):
|
||||
h_files[i] = file_fds[i]
|
||||
|
||||
# Invoke C function
|
||||
sbus = SBus()
|
||||
sbus = SBus()
|
||||
n_status = sbus.sbus_back_.sbus_send_msg(sbus_name,
|
||||
h_files,
|
||||
n_files,
|
||||
@ -237,5 +241,4 @@ class SBus(object):
|
||||
n_params)
|
||||
return n_status
|
||||
|
||||
|
||||
'''============================ END OF FILE ==============================='''
|
||||
|
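Taken together, the wrapper exposes four operations over the C library: create a bus bound to a domain-socket path, listen until a message arrives, receive it as an SBusDatagram, and send a datagram to a remote bus. A minimal receive-loop sketch; the daemon_factory main_loop further down is the real consumer of this API, and the pipe path here is illustrative only:

    from SBusPythonFacade.SBus import SBus

    sbus = SBus()
    fd = sbus.create('/tmp/factory_pipe')   # illustrative path
    if fd < 0:
        raise RuntimeError('sbus_create failed')
    while True:
        if sbus.listen(fd) < 0:
            break
        dtg = sbus.receive(fd)              # SBusDatagram or None
        if dtg is None:
            continue
        print(dtg.get_command(), dtg.get_exec_params())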
@ -21,19 +21,19 @@ Limitations under the License.
|
||||
dictionary of dictionaries
|
||||
==========================================================================='''
|
||||
|
||||
import os
|
||||
import json
|
||||
import os
|
||||
import syslog
|
||||
|
||||
from SBusStorletCommand import SBUS_CMD_NOP
|
||||
from SBusFileDescription import SBUS_FD_OUTPUT_OBJECT
|
||||
from SBusStorletCommand import SBUS_CMD_NOP
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
class SBusDatagram(object):
|
||||
'''
|
||||
@summary: This class aggregates data to be transferred
|
||||
'''@summary: This class aggregates data to be transferred
|
||||
|
||||
using SBus functionality.
|
||||
'''
|
||||
|
||||
@ -41,9 +41,9 @@ class SBusDatagram(object):
|
||||
task_id_dict_key_name_ = 'taskId'
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def __init__(self):
|
||||
'''
|
||||
@summary: CTOR
|
||||
'''@summary: CTOR
|
||||
|
||||
@ivar e_command_ : A command to Storlet Daemon.
|
||||
@type e_command_ : Integer. SBusStorletCommand enumerated value.
|
||||
@ -59,19 +59,20 @@ class SBusDatagram(object):
|
||||
@invariant: Quantity of entries in files_metadata_ list
|
||||
is the same as in h_files_, i.e. n_files_.
|
||||
'''
|
||||
self.e_command_ = SBUS_CMD_NOP
|
||||
self.task_id_ = None
|
||||
self.h_files_ = None
|
||||
self.n_files_ = 0
|
||||
self.e_command_ = SBUS_CMD_NOP
|
||||
self.task_id_ = None
|
||||
self.h_files_ = None
|
||||
self.n_files_ = 0
|
||||
self.files_metadata_ = None
|
||||
self.exec_params_ = None
|
||||
self.exec_params_ = None
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
@staticmethod
|
||||
def create_service_datagram(command,
|
||||
outfd):
|
||||
'''
|
||||
@summary: Datagram static factory.
|
||||
'''@summary: Datagram static factory.
|
||||
|
||||
Create "service" datagram, i.e.
|
||||
- command shall be one of
|
||||
{PING, START/STOP/STATUS-DAEMON}
|
||||
@ -99,12 +100,13 @@ class SBusDatagram(object):
|
||||
return dtg
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def from_raw_data(self,
|
||||
h_files,
|
||||
str_json_metadata,
|
||||
str_json_params):
|
||||
'''
|
||||
@summary: CTOR
|
||||
'''@summary: CTOR
|
||||
|
||||
Construct object from file list and
|
||||
two JSON-encoded strings.
|
||||
|
||||
@ -122,10 +124,11 @@ class SBusDatagram(object):
|
||||
self.extract_params(str_json_params)
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def extract_metadata(self,
|
||||
str_json_metadata):
|
||||
'''
|
||||
@summary: Extract files_metadata array
|
||||
'''@summary: Extract files_metadata array
|
||||
|
||||
of dictionaries form a JSON string
|
||||
@requires: n_files_ has to be se
|
||||
|
||||
@ -142,9 +145,10 @@ class SBusDatagram(object):
|
||||
self.files_metadata_.append(json.loads(str_curr_metadata))
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def extract_params(self, str_json_params):
|
||||
'''
|
||||
@summary: Extract command field and exec_params
|
||||
'''@summary: Extract command field and exec_params
|
||||
|
||||
dictionary form a JSON string
|
||||
@param str_json_params: JSON encoding for the execution parameters.
|
||||
@type str_json_params: string.
|
||||
@ -169,9 +173,10 @@ class SBusDatagram(object):
|
||||
self.exec_params_ = None
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_params_and_cmd_as_json(self):
|
||||
'''
|
||||
@summary: Convert command field and execution parameters
|
||||
'''@summary: Convert command field and execution parameters
|
||||
|
||||
dictionary into JSON as the following -
|
||||
1. Copy exec_params_. Initialize the combined dictionary.
|
||||
2. Push the next pair into the combined dictionary
|
||||
@ -193,9 +198,10 @@ class SBusDatagram(object):
|
||||
return str_result
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_files_metadata_as_json(self):
|
||||
'''
|
||||
@summary: Encode the list of dictionaries into JSON as the following -
|
||||
'''@summary: Encode the list of dictionaries into JSON as the following
|
||||
|
||||
1. Create a combined dictionary (Integer-to-String)
|
||||
Key - index in the original list
|
||||
Value - JSON encoding of the certain dictionary
|
||||
@ -213,9 +219,9 @@ class SBusDatagram(object):
|
||||
return str_result
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_num_files(self):
|
||||
'''
|
||||
@summary: Getter.
|
||||
'''@summary: Getter.
|
||||
|
||||
@return: The quantity of file descriptors.
|
||||
@rtype: integer
|
||||
@ -223,9 +229,9 @@ class SBusDatagram(object):
|
||||
return self.n_files_
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_files(self):
|
||||
'''
|
||||
@summary: Getter.
|
||||
'''@summary: Getter.
|
||||
|
||||
@return: The list of file descriptors.
|
||||
@rtype: List of integers
|
||||
@ -233,9 +239,10 @@ class SBusDatagram(object):
|
||||
return self.h_files_
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def set_files(self, h_files):
|
||||
'''
|
||||
@summary: Setter.
|
||||
'''@summary: Setter.
|
||||
|
||||
Assign file handlers list and update n_files_ field
|
||||
|
||||
@param h_files: File descriptors.
|
||||
@ -257,9 +264,10 @@ class SBusDatagram(object):
|
||||
self.h_files_.append(h_files[i])
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_first_file_of_type(self, file_type):
|
||||
'''
|
||||
@summary: Iterate through file list and metadata.
|
||||
'''@summary: Iterate through file list and metadata.
|
||||
|
||||
Find the first file with the required type
|
||||
|
||||
@param file_type: The file type to look for
|
||||
@ -273,15 +281,15 @@ class SBusDatagram(object):
|
||||
if (self.get_metadata()[i])['type'] == file_type:
|
||||
try:
|
||||
required_file = os.fdopen(self.get_files()[i], 'w')
|
||||
except IOError, err:
|
||||
except IOError as err:
|
||||
syslog.syslog(syslog.LOG_DEBUG,
|
||||
'Failed to open file: %s' % err.strerror)
|
||||
return required_file
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_metadata(self):
|
||||
'''
|
||||
@summary: Getter.
|
||||
'''@summary: Getter.
|
||||
|
||||
@return: The list of meta-data dictionaries.
|
||||
@rtype: List of dictionaries
|
||||
@ -289,9 +297,10 @@ class SBusDatagram(object):
|
||||
return self.files_metadata_
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def set_metadata(self, metadata):
|
||||
'''
|
||||
@summary: Setter.
|
||||
'''@summary: Setter.
|
||||
|
||||
Assign file_metadata_ field
|
||||
|
||||
@param metadata: File descriptors meta-data dictionaries.
|
||||
@ -302,9 +311,9 @@ class SBusDatagram(object):
|
||||
self.files_metadata_ = metadata
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_exec_params(self):
|
||||
'''
|
||||
@summary: Getter.
|
||||
'''@summary: Getter.
|
||||
|
||||
@return: The execution parameters dictionary.
|
||||
@rtype: Dictionary
|
||||
@ -312,9 +321,10 @@ class SBusDatagram(object):
|
||||
return self.exec_params_
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def set_exec_params(self, params):
|
||||
'''
|
||||
@summary: Setter.
|
||||
'''@summary: Setter.
|
||||
|
||||
Assign execution parameters dictionary.
|
||||
|
||||
@param params: Execution parameters to assign
|
||||
@ -326,9 +336,10 @@ class SBusDatagram(object):
|
||||
self.exec_params_ = params
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def add_exec_param(self, param_name, param_value):
|
||||
'''
|
||||
@summary: Add a single pair to the exec_params_ dictionary
|
||||
'''@summary: Add a single pair to the exec_params_ dictionary
|
||||
|
||||
Don't change if the parameter exists already
|
||||
|
||||
@param param_name: Execution parameter name to be added
|
||||
@ -351,9 +362,9 @@ class SBusDatagram(object):
|
||||
return b_status
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_command(self):
|
||||
'''
|
||||
@summary: Getter.
|
||||
'''@summary: Getter.
|
||||
|
||||
@return: The Storlet Daemon command.
|
||||
@rtype: SBusStorletCommand
|
||||
@ -361,9 +372,10 @@ class SBusDatagram(object):
|
||||
return self.e_command_
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def set_command(self, cmd):
|
||||
'''
|
||||
@summary: Setter.
|
||||
'''@summary: Setter.
|
||||
|
||||
Assign Storlet Daemon command.
|
||||
|
||||
@param cmd: Command to assign
|
||||
@ -374,9 +386,9 @@ class SBusDatagram(object):
|
||||
self.e_command_ = cmd
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_task_id(self):
|
||||
'''
|
||||
@summary: Getter.
|
||||
'''@summary: Getter.
|
||||
|
||||
@return: The task id.
|
||||
@rtype: string
|
||||
@ -384,9 +396,10 @@ class SBusDatagram(object):
|
||||
return self.task_id_
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def set_task_id(self, taskId):
|
||||
'''
|
||||
@summary: Setter.
|
||||
'''@summary: Setter.
|
||||
|
||||
Assign task id
|
||||
|
||||
@param taskId: Command to assign
|
||||
@ -397,10 +410,11 @@ class SBusDatagram(object):
|
||||
self.task_id_ = taskId
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
@staticmethod
|
||||
def dictionaies_equal(d1, d2):
|
||||
'''
|
||||
@summary: Check whether two dictionaries has the same content.
|
||||
'''@summary: Check whether two dictionaries has the same content.
|
||||
|
||||
The order of the entries is not considered.
|
||||
|
||||
@return: The answer to the above
|
||||
|
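A service datagram is the simplest case: create_service_datagram wraps one command and a single output file descriptor, and get_params_and_cmd_as_json / get_files_metadata_as_json produce the JSON strings that SBus.send hands to the C layer. A short sketch of building and sending one, mirroring the halt flow that send_halt_cmd_to_daemon_factory.py implements below (the pipe path is illustrative):

    import os

    from SBusPythonFacade.SBus import SBus
    from SBusPythonFacade.SBusDatagram import SBusDatagram
    from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_HALT

    read_fd, write_fd = os.pipe()
    dtg = SBusDatagram.create_service_datagram(SBUS_CMD_HALT, write_fd)
    print(dtg.get_num_files())               # expected: 1, the single output descriptor
    print(dtg.get_params_and_cmd_as_json())  # JSON carrying the command id
    SBus.send('/tmp/factory_pipe', dtg)      # illustrative pipe path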
@ -17,7 +17,6 @@ Limitations under the License.
|
||||
21-Jul-2014 evgenyl Initial implementation.
|
||||
==========================================================================='''
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
'''
|
||||
@summary: Enumerate file usage intents.
|
||||
@ -25,12 +24,12 @@ Limitations under the License.
|
||||
with its Java counterpart.
|
||||
'''
|
||||
|
||||
SBUS_FD_INPUT_OBJECT = 0
|
||||
SBUS_FD_OUTPUT_OBJECT = 1
|
||||
SBUS_FD_OUTPUT_OBJECT_METADATA = 2
|
||||
SBUS_FD_OUTPUT_OBJECT_AND_METADATA = 3
|
||||
SBUS_FD_LOGGER = 4
|
||||
SBUS_FD_OUTPUT_CONTAINER = 5
|
||||
SBUS_FD_OUTPUT_TASK_ID = 6
|
||||
SBUS_FD_INPUT_OBJECT = 0
|
||||
SBUS_FD_OUTPUT_OBJECT = 1
|
||||
SBUS_FD_OUTPUT_OBJECT_METADATA = 2
|
||||
SBUS_FD_OUTPUT_OBJECT_AND_METADATA = 3
|
||||
SBUS_FD_LOGGER = 4
|
||||
SBUS_FD_OUTPUT_CONTAINER = 5
|
||||
SBUS_FD_OUTPUT_TASK_ID = 6
|
||||
|
||||
'''============================ END OF FILE ==============================='''
|
||||
|
@ -17,7 +17,6 @@ Limitations under the License.
|
||||
21-Jul-2014 evgenyl Initial implementation.
|
||||
==========================================================================='''
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
'''
|
||||
@summary: Enumerate Storlet Daemon commands.
|
||||
@ -25,15 +24,15 @@ Limitations under the License.
|
||||
with its Java counterpart.
|
||||
'''
|
||||
|
||||
SBUS_CMD_HALT = 0
|
||||
SBUS_CMD_EXECUTE = 1
|
||||
SBUS_CMD_START_DAEMON = 2
|
||||
SBUS_CMD_STOP_DAEMON = 3
|
||||
SBUS_CMD_DAEMON_STATUS = 4
|
||||
SBUS_CMD_STOP_DAEMONS = 5
|
||||
SBUS_CMD_PING = 6
|
||||
SBUS_CMD_DESCRIPTOR = 7
|
||||
SBUS_CMD_CANCEL = 8
|
||||
SBUS_CMD_NOP = 9
|
||||
SBUS_CMD_HALT = 0
|
||||
SBUS_CMD_EXECUTE = 1
|
||||
SBUS_CMD_START_DAEMON = 2
|
||||
SBUS_CMD_STOP_DAEMON = 3
|
||||
SBUS_CMD_DAEMON_STATUS = 4
|
||||
SBUS_CMD_STOP_DAEMONS = 5
|
||||
SBUS_CMD_PING = 6
|
||||
SBUS_CMD_DESCRIPTOR = 7
|
||||
SBUS_CMD_CANCEL = 8
|
||||
SBUS_CMD_NOP = 9
|
||||
|
||||
'''============================ END OF FILE ==============================='''
|
||||
|
@@ -1,5 +1,4 @@
 #!/usr/bin/python
-#-----------------------------------------------------------------------------------------------
 '''-------------------------------------------------------------------------
 Copyright IBM Corp. 2015, 2015 All Rights Reserved
 Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,8 +15,7 @@ Limitations under the License.
 -------------------------------------------------------------------------'''
 
 from setuptools import setup
-setup( name = 'SBusPythonFacade',
-       version = '1.0',
-       package_dir={'SBusPythonFacade':''},
-       packages=['SBusPythonFacade'] )
-
+setup(name='SBusPythonFacade',
+      version='1.0',
+      package_dir={'SBusPythonFacade': ''},
+      packages=['SBusPythonFacade'])
@ -1,54 +1,58 @@
|
||||
#!/usr/bin/python
|
||||
#-----------------------------------------------------------------------------------------------
|
||||
# Copyright IBM Corp. 2015, 2015 All Rights Reserved
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# Limitations under the License.
|
||||
#-----------------------------------------------------------------------------------------------
|
||||
'''-------------------------------------------------------------------------
|
||||
Copyright IBM Corp. 2015, 2015 All Rights Reserved
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
'''===========================================================================
|
||||
02-Dec-2014 evgenyl Initial implementation.
|
||||
==========================================================================='''
|
||||
|
||||
import sys
|
||||
import os
|
||||
import sys
|
||||
|
||||
from SBusPythonFacade.SBus import SBus
|
||||
from SBusPythonFacade.SBusDatagram import SBusDatagram
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_HALT
|
||||
from SBusPythonFacade.SBus import SBus
|
||||
from SBusPythonFacade.SBusDatagram import SBusDatagram
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_HALT
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def print_usage(argv):
|
||||
print argv[0] + ' /path/to/daemon/factory_pipe'
|
||||
print 'Example:'
|
||||
print argv[0] + ' ',
|
||||
print '/home/lxc_device/pipes/scopes/'\
|
||||
'AUTH_fb8b63c579054c48816ca8acd090b3d9/factory_pipe'
|
||||
print(argv[0] + ' /path/to/daemon/factory_pipe')
|
||||
print('Example:')
|
||||
sys.stdout.write(argv[0] + ' ')
|
||||
print('/home/lxc_device/pipes/scopes/'
|
||||
'AUTH_fb8b63c579054c48816ca8acd090b3d9/factory_pipe')
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main(argv):
|
||||
if 2 > len(argv):
|
||||
print_usage(argv)
|
||||
return
|
||||
|
||||
daemon_factory_pipe_name = argv[1]
|
||||
fi,fo = os.pipe()
|
||||
fi, fo = os.pipe()
|
||||
halt_dtg = SBusDatagram.create_service_datagram(SBUS_CMD_HALT, fo)
|
||||
n_status = SBus.send(daemon_factory_pipe_name, halt_dtg)
|
||||
if 0 > n_status:
|
||||
print 'Sending failed'
|
||||
else:
|
||||
print 'Sending succeeded'
|
||||
cmd_response = os.read( fi, 256 )
|
||||
print cmd_response
|
||||
print('Sending failed')
|
||||
else:
|
||||
print('Sending succeeded')
|
||||
cmd_response = os.read(fi, 256)
|
||||
print(cmd_response)
|
||||
os.close(fi)
|
||||
os.close(fo)
|
||||
|
||||
|
@ -20,38 +20,42 @@ XX-XXX-2014 eranr Initial implementation.
|
||||
01-Dec-2014 evgenyl Dropping multi-threaded monitoring
|
||||
==========================================================================='''
|
||||
|
||||
import errno
|
||||
import logging
|
||||
from logging.handlers import SysLogHandler
|
||||
import os
|
||||
import pwd
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import errno
|
||||
import signal
|
||||
import logging
|
||||
import subprocess
|
||||
from logging.handlers import SysLogHandler
|
||||
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_START_DAEMON,\
|
||||
SBUS_CMD_STOP_DAEMON, SBUS_CMD_DAEMON_STATUS, SBUS_CMD_STOP_DAEMONS,\
|
||||
SBUS_CMD_PING, SBUS_CMD_HALT
|
||||
from SBusPythonFacade.SBusFileDescription import SBUS_FD_OUTPUT_OBJECT
|
||||
from SBusPythonFacade.SBus import SBus
|
||||
from SBusPythonFacade.SBusDatagram import *
|
||||
from SBusPythonFacade.SBusDatagram import SBusDatagram
|
||||
from SBusPythonFacade.SBusFileDescription import SBUS_FD_OUTPUT_OBJECT
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_DAEMON_STATUS
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_HALT
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_PING
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_START_DAEMON
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_STOP_DAEMON
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_STOP_DAEMONS
|
||||
|
||||
'''========================================================================'''
|
||||
|
||||
|
||||
class daemon_factory():
|
||||
'''
|
||||
@summary: This class acts as the manager for storlet daemons.
|
||||
class daemon_factory(object):
|
||||
'''@summary: This class acts as the manager for storlet daemons.
|
||||
|
||||
It listens to commands and reacts on them in an internal loop.
|
||||
As for now (01-Dec-2014) it is a single thread, synchronous
|
||||
processing.
|
||||
'''
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def __init__(self, path, logger):
|
||||
'''
|
||||
@summary: CTOR
|
||||
'''@summary: CTOR
|
||||
|
||||
Prepare the auxiliary data structures
|
||||
|
||||
@param path: Path to the pipe file internal SBus listens to
|
||||
@ -65,10 +69,11 @@ class daemon_factory():
|
||||
self.storlet_name_to_pipe_name = dict()
|
||||
# Dictionary: map storlet name to daemon process PID
|
||||
self.storlet_name_to_pid = dict()
|
||||
|
||||
|
||||
self.NUM_OF_TRIES_PINGING_STARTING_DAEMON = 5
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_jvm_args(self,
|
||||
daemon_language,
|
||||
storlet_path,
|
||||
@ -77,8 +82,8 @@ class daemon_factory():
|
||||
uds_path,
|
||||
log_level,
|
||||
container_id):
|
||||
'''
|
||||
@summary: get_jvm_args
|
||||
'''@summary: get_jvm_args
|
||||
|
||||
Check the input parameters, produce the list
|
||||
of arguments for JVM process launch
|
||||
|
||||
@ -136,8 +141,8 @@ class daemon_factory():
|
||||
pargs = []
|
||||
if daemon_language == "java":
|
||||
self.logger.debug('START_DAEMON:preparing arguments')
|
||||
#Setting two environmental variables
|
||||
#The path strings are corrupted if passed is pargs list below
|
||||
# Setting two environmental variables
|
||||
# The path strings are corrupted if passed is pargs list below
|
||||
os.environ['CLASSPATH'] = str_dmn_clspth
|
||||
os.environ['LD_LIBRARY_PATH'] = str_library_path
|
||||
pargs = [str('/usr/bin/java'),
|
||||
@ -157,9 +162,10 @@ class daemon_factory():
|
||||
return n_error_id, error_text, pargs
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def spawn_subprocess(self, pargs):
|
||||
'''
|
||||
@summary: spawn_subprocess
|
||||
'''@summary: spawn_subprocess
|
||||
|
||||
Launch a JVM process for some storlet daemon
|
||||
|
||||
@param pargs: Arguments for the JVM
|
||||
@ -192,8 +198,8 @@ class daemon_factory():
|
||||
format(storlet_name, jvm_pid))
|
||||
# Keep JVM PID
|
||||
self.storlet_name_to_pid[storlet_name] = jvm_pid
|
||||
b_status, error_text = self.wait_for_daemon_to_initialize(
|
||||
storlet_name)
|
||||
b_status, error_text = \
|
||||
self.wait_for_daemon_to_initialize(storlet_name)
|
||||
if not b_status:
|
||||
raise 'No response from Daemon'
|
||||
self.logger.debug('START_DAEMON: just occurred')
|
||||
@ -207,9 +213,10 @@ class daemon_factory():
|
||||
return b_status, error_text
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def wait_for_daemon_to_initialize(self, storlet_name):
|
||||
'''
|
||||
@summary: wait_for_daemon_to_initialize
|
||||
'''@summary: wait_for_daemon_to_initialize
|
||||
|
||||
Send a Ping service datagram. Validate that
|
||||
Daemon response is correct. Give up after the
|
||||
predefined number of attempts (5)
|
||||
@ -222,9 +229,9 @@ class daemon_factory():
|
||||
@return: Description text of possible error
|
||||
@rtype: String
|
||||
'''
|
||||
storlet_pipe_name = self.storlet_name_to_pipe_name[storlet_name]
|
||||
self.logger.debug('Send PING command to {0} via {1}'.\
|
||||
format(storlet_name,storlet_pipe_name))
|
||||
storlet_pipe_name = self.storlet_name_to_pipe_name[storlet_name]
|
||||
self.logger.debug('Send PING command to {0} via {1}'.
|
||||
format(storlet_name, storlet_pipe_name))
|
||||
read_fd, write_fd = os.pipe()
|
||||
dtg = SBusDatagram.create_service_datagram(SBUS_CMD_PING, write_fd)
|
||||
b_status = False
|
||||
@ -241,8 +248,9 @@ class daemon_factory():
|
||||
os.close(read_fd)
|
||||
os.close(write_fd)
|
||||
return b_status, error_text
|
||||
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def process_start_daemon(self,
|
||||
daemon_language,
|
||||
storlet_path,
|
||||
@ -251,8 +259,8 @@ class daemon_factory():
|
||||
uds_path,
|
||||
log_level,
|
||||
container_id):
|
||||
'''
|
||||
@summary: process_start_daemon
|
||||
'''@summary: process_start_daemon
|
||||
|
||||
Start storlet daemon process
|
||||
|
||||
@see: get_jvm_args for the list of parameters
|
||||
@ -294,17 +302,18 @@ class daemon_factory():
|
||||
error_text = '{0} is already running'.format(storlet_name)
|
||||
self.logger.debug(error_text)
|
||||
else:
|
||||
error_text = '{0} is not running. About to spawn process'.\
|
||||
error_text = '{0} is not running. About to spawn process'. \
|
||||
format(storlet_name)
|
||||
self.logger.debug(error_text)
|
||||
b_status, error_text = self.spawn_subprocess(pargs)
|
||||
|
||||
|
||||
return b_status, error_text
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_process_status_by_name(self, storlet_name):
|
||||
'''
|
||||
@summary: get_process_status_by_name
|
||||
'''@summary: get_process_status_by_name
|
||||
|
||||
Check if the daemon runs for the specific storlet
|
||||
|
||||
@param storlet_name: Storlet name we are checking the daemon for
|
||||
@ -327,16 +336,17 @@ class daemon_factory():
|
||||
b_status, error_text = self.get_process_status_by_pid(
|
||||
daemon_pid, storlet_name)
|
||||
else:
|
||||
error_text = 'Storlet name {0} not found in map'.\
|
||||
error_text = 'Storlet name {0} not found in map'. \
|
||||
format(storlet_name)
|
||||
self.logger.debug(error_text)
|
||||
|
||||
return b_status, error_text
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def get_process_status_by_pid(self, daemon_pid, storlet_name):
|
||||
'''
|
||||
@summary: get_process_status_by_pid
|
||||
'''@summary: get_process_status_by_pid
|
||||
|
||||
Check if a process with specific ID runs
|
||||
|
||||
@param daemon_pid: Storlet daemon process ID
|
||||
@ -355,14 +365,14 @@ class daemon_factory():
|
||||
obtained_code = 0
|
||||
try:
|
||||
obtained_pid, obtained_code = os.waitpid(daemon_pid, os.WNOHANG)
|
||||
error_text = 'Storlet {0}, PID = {1}, ErrCode = {2}'.\
|
||||
error_text = 'Storlet {0}, PID = {1}, ErrCode = {2}'. \
|
||||
format(storlet_name, obtained_pid, obtained_code)
|
||||
self.logger.debug(error_text)
|
||||
except OSError, err:
|
||||
except OSError as err:
|
||||
if err.errno == errno.ESRCH:
|
||||
error_text = 'No running daemon for {0}'.format(storlet_name)
|
||||
elif err.errno == errno.EPERM:
|
||||
error_text = 'No permission to access daemon for {0}'.\
|
||||
error_text = 'No permission to access daemon for {0}'. \
|
||||
format(storlet_name)
|
||||
else:
|
||||
error_text = 'Unknown error'
|
||||
@ -376,9 +386,10 @@ class daemon_factory():
|
||||
return b_status, error_text
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def process_kill(self, storlet_name):
|
||||
'''
|
||||
@summary: process_kill
|
||||
'''@summary: process_kill
|
||||
|
||||
Kill the storlet daemon immediately
|
||||
(kill -9 $DMN_PID)
|
||||
|
||||
@ -399,10 +410,10 @@ class daemon_factory():
|
||||
try:
|
||||
os.kill(dmn_pid, signal.SIGKILL)
|
||||
obtained_pid, obtained_code = os.waitpid(dmn_pid, os.WNOHANG)
|
||||
error_text = 'Storlet {0}, PID = {1}, ErrCode = {2}'.\
|
||||
error_text = 'Storlet {0}, PID = {1}, ErrCode = {2}'. \
|
||||
format(storlet_name, obtained_pid, obtained_code)
|
||||
self.logger.debug(error_text)
|
||||
except:
|
||||
except Exception:
|
||||
self.logger.debug('Crash while killing storlet')
|
||||
self.storlet_name_to_pid.pop(storlet_name)
|
||||
else:
|
||||
@ -412,10 +423,11 @@ class daemon_factory():
|
||||
return b_success, error_text
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def process_kill_all(self):
|
||||
'''
|
||||
@summary: process_kill_all
|
||||
Iterate through storlet daemons. Kill every one.
|
||||
'''@summary: process_kill_all Iterate through storlet daemons.
|
||||
|
||||
Kill every one.
|
||||
|
||||
@return: Status (True)
|
||||
@rtype: Boolean
|
||||
@ -427,9 +439,10 @@ class daemon_factory():
|
||||
return True, 'OK'
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def shutdown_all_processes(self):
|
||||
'''
|
||||
@summary: shutdown_all_processes
|
||||
'''@summary: shutdown_all_processes
|
||||
|
||||
send HALT command to every spawned process
|
||||
'''
|
||||
answer = ''
|
||||
@ -441,9 +454,9 @@ class daemon_factory():
|
||||
return True, answer
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def shutdown_process(self, storlet_name):
|
||||
'''
|
||||
@summary: send HALT command to storlet daemon
|
||||
'''@summary: send HALT command to storlet daemon
|
||||
|
||||
@param storlet_name: Storlet name we are checking the daemon for
|
||||
@type storlet_name: String
|
||||
@ -452,45 +465,46 @@ class daemon_factory():
|
||||
@rtype: Boolean
|
||||
@return: Description text of possible error
|
||||
@rtype: String
|
||||
'''
|
||||
'''
|
||||
|
||||
b_status = False
|
||||
error_text = ''
|
||||
self.logger.debug('Inside shutdown_process {0}'.format(storlet_name))
|
||||
storlet_pipe_name = self.storlet_name_to_pipe_name[storlet_name]
|
||||
self.logger.debug('Send HALT command to {0} via {1}'.\
|
||||
format(storlet_name,storlet_pipe_name))
|
||||
self.logger.debug('Inside shutdown_process {0}'.format(storlet_name))
|
||||
storlet_pipe_name = self.storlet_name_to_pipe_name[storlet_name]
|
||||
self.logger.debug('Send HALT command to {0} via {1}'.
|
||||
format(storlet_name, storlet_pipe_name))
|
||||
read_fd, write_fd = os.pipe()
|
||||
dtg = SBusDatagram.create_service_datagram(SBUS_CMD_HALT, write_fd)
|
||||
SBus.send(storlet_pipe_name, dtg)
|
||||
os.close(read_fd)
|
||||
os.close(write_fd)
|
||||
dmn_pid = self.storlet_name_to_pid.get(storlet_name, -1)
|
||||
self.logger.debug('Storlet Daemon PID is {0}'.\
|
||||
format(dmn_pid))
|
||||
self.logger.debug('Storlet Daemon PID is {0}'.format(dmn_pid))
|
||||
if -1 != dmn_pid:
|
||||
os.waitpid(dmn_pid,0)
|
||||
os.waitpid(dmn_pid, 0)
|
||||
self.storlet_name_to_pid.pop(storlet_name)
|
||||
b_status = True
|
||||
return b_status
|
||||
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def dispatch_command(self, dtg, container_id):
|
||||
'''
|
||||
@summary: dispatch_command
|
||||
Parse datagram. React on the request.
|
||||
'''@summary: dispatch_command
|
||||
|
||||
Parse datagram. React on the request.
|
||||
|
||||
@param dtg: Datagram to process
|
||||
@type dtg: SBus python facade Datagram
|
||||
@param container_id: container id
|
||||
@type container_id: String
|
||||
|
||||
|
||||
@return: Status
|
||||
@rtype: Boolean
|
||||
@return: Description text of possible error
|
||||
@rtype: String
|
||||
@return: Flag - whether we need to continue operating
|
||||
@rtype: Boolean
|
||||
'''
|
||||
@rtype: Boolean
|
||||
'''
|
||||
|
||||
b_status = False
|
||||
error_text = ''
|
||||
b_iterate = True
|
||||
@ -498,141 +512,146 @@ class daemon_factory():
|
||||
try:
|
||||
command = dtg.get_command()
|
||||
except Exception:
|
||||
error_text = "Received message does not have command"\
|
||||
error_text = "Received message does not have command" \
|
||||
" identifier. continuing."
|
||||
b_status = False
|
||||
self.logger.error( error_text )
|
||||
self.logger.error(error_text)
|
||||
else:
|
||||
self.logger.debug("Received command {0}".format(command))
|
||||
|
||||
|
||||
prms = dtg.get_exec_params()
|
||||
if command == SBUS_CMD_START_DAEMON:
|
||||
self.logger.debug( 'Do SBUS_CMD_START_DAEMON' )
|
||||
self.logger.debug( 'prms = %s'%str(prms) )
|
||||
self.logger.debug('Do SBUS_CMD_START_DAEMON')
|
||||
self.logger.debug('prms = %s' % str(prms))
|
||||
b_status, error_text = \
|
||||
self.process_start_daemon(prms['daemon_language'],
|
||||
prms['storlet_path'],
|
||||
prms['storlet_name'],
|
||||
prms['pool_size'],
|
||||
prms['uds_path'],
|
||||
prms['storlet_path'],
|
||||
prms['storlet_name'],
|
||||
prms['pool_size'],
|
||||
prms['uds_path'],
|
||||
prms['log_level'],
|
||||
container_id)
|
||||
elif command == SBUS_CMD_STOP_DAEMON:
|
||||
self.logger.debug( 'Do SBUS_CMD_STOP_DAEMON' )
|
||||
b_status, error_text = self.process_kill(\
|
||||
prms['storlet_name'])
|
||||
self.logger.debug('Do SBUS_CMD_STOP_DAEMON')
|
||||
b_status, error_text = \
|
||||
self.process_kill(prms['storlet_name'])
|
||||
elif command == SBUS_CMD_DAEMON_STATUS:
|
||||
self.logger.debug( 'Do SBUS_CMD_DAEMON_STATUS' )
|
||||
b_status, error_text = self.get_process_status_by_name(\
|
||||
prms['storlet_name'])
|
||||
self.logger.debug('Do SBUS_CMD_DAEMON_STATUS')
|
||||
b_status, error_text = \
|
||||
self.get_process_status_by_name(prms['storlet_name'])
|
||||
elif command == SBUS_CMD_STOP_DAEMONS:
|
||||
self.logger.debug( 'Do SBUS_CMD_STOP_DAEMONS' )
|
||||
self.logger.debug('Do SBUS_CMD_STOP_DAEMONS')
|
||||
b_status, error_text = self.process_kill_all()
|
||||
b_iterate = False
|
||||
elif command == SBUS_CMD_HALT:
|
||||
self.logger.debug( 'Do SBUS_CMD_HALT' )
|
||||
self.logger.debug('Do SBUS_CMD_HALT')
|
||||
b_status, error_text = self.shutdown_all_processes()
|
||||
b_iterate = False
|
||||
elif command == SBUS_CMD_PING:
|
||||
self.logger.debug( 'Do SBUS_CMD_PING' )
|
||||
self.logger.debug('Do SBUS_CMD_PING')
|
||||
b_status = True
|
||||
error_text = 'OK'
|
||||
else:
|
||||
b_status = False
|
||||
error_text = "got unknown command %d" % command
|
||||
self.logger.error( error_text )
|
||||
|
||||
self.logger.debug( 'Done' )
|
||||
self.logger.error(error_text)
|
||||
|
||||
self.logger.debug('Done')
|
||||
return b_status, error_text, b_iterate
|
||||
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def main_loop(self, container_id):
|
||||
'''
|
||||
@summary: main_loop
|
||||
'''@summary: main_loop
|
||||
|
||||
The 'internal' loop. Listen to SBus, receive datagram,
|
||||
dispatch command, report back.
|
||||
'''
|
||||
|
||||
# Create SBus. Listen and process requests
|
||||
sbus = SBus()
|
||||
fd = sbus.create( self.pipe_path )
|
||||
fd = sbus.create(self.pipe_path)
|
||||
if fd < 0:
|
||||
self.logger.error("Failed to create SBus. exiting.")
|
||||
return
|
||||
|
||||
|
||||
b_iterate = True
|
||||
b_status = True
|
||||
error_text = ''
|
||||
|
||||
|
||||
while b_iterate:
|
||||
rc = sbus.listen(fd)
|
||||
if rc < 0:
|
||||
self.logger.error("Failed to wait on SBus. exiting.")
|
||||
return
|
||||
self.logger.debug("Wait returned")
|
||||
|
||||
|
||||
dtg = sbus.receive(fd)
|
||||
if not dtg:
|
||||
self.logger.error("Failed to receive message. exiting.")
|
||||
return
|
||||
|
||||
|
||||
try:
|
||||
outfd = dtg.get_first_file_of_type( SBUS_FD_OUTPUT_OBJECT )
|
||||
outfd = dtg.get_first_file_of_type(SBUS_FD_OUTPUT_OBJECT)
|
||||
except Exception:
|
||||
self.logger.error("Received message does not have outfd."\
|
||||
self.logger.error("Received message does not have outfd."
|
||||
" continuing.")
|
||||
continue
|
||||
else:
|
||||
self.logger.debug("Received outfd %d" % outfd.fileno())
|
||||
|
||||
b_status, error_text, b_iterate = self.dispatch_command(dtg, container_id)
|
||||
|
||||
b_status, error_text, b_iterate = \
|
||||
self.dispatch_command(dtg, container_id)
|
||||
|
||||
self.log_and_report(outfd, b_status, error_text)
|
||||
outfd.close()
|
||||
|
||||
|
||||
# We left the main loop for some reason. Terminating.
|
||||
self.logger.debug( 'Leaving main loop' )
|
||||
|
||||
self.logger.debug('Leaving main loop')
|
||||
|
||||
'''--------------------------------------------------------------------'''

def log_and_report(self, outfd, b_status, error_text):
'''
@summary: log_and_report
Send the result description message
'''@summary: log_and_report

Send the result description message
back to swift middleware

@param outfd: Output channel to send the message to
@type outfd: File descriptor
@param b_status: Flag, whether the operation was successful
@type: Boolean
@param error_text: The result description
@type error_text: String

@rtype: void
'''
num = -1;
answer = str(b_status) + ': ' + error_text
self.logger.debug(' Just processed command')
self.logger.debug(' Going to answer: %s'%answer)
self.logger.debug(' Going to answer: %s' % answer)
try:
num = outfd.write( answer )
outfd.write(answer)
self.logger.debug(" ... and still alive")
except:
self.logger.debug('Problem while writing response %s'%answer)
except Exception:
self.logger.debug('Problem while writing response %s' % answer)

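# Illustrative sketch (not part of this change): the answer written above is a
# plain '<bool>: <text>' string, e.g. 'True: OK', which the client side
# (_parse_sandbox_factory_answer in storlet_runtime.py) recovers by splitting
# on the first colon:
#
#   status_str, error_txt = 'True: OK'.split(':', 1)
#   # status_str == 'True', error_txt == ' OK'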
'''======================= END OF daemon_factory CLASS ===================='''

'''------------------------------------------------------------------------'''

def start_logger(logger_name, log_level, container_id):
'''
@summary: start_logger
Initialize logging of this process.
'''@summary: start_logger

Initialize logging of this process.
Set the logger format.

@param logger_name: The name to report with
@type logger_name: String
@param log_level: The verbosity level
@type log_level: String

@rtype: void

@rtype: void
'''
logging.raiseExceptions = False
log_level = log_level.upper()
@ -648,26 +667,25 @@ def start_logger(logger_name, log_level, container_id):
else:
level = logging.ERROR

logger = logging.getLogger("CONT #" + container_id + ": " + logger_name)

if log_level == 'OFF':
logging.disable(logging.CRITICAL)
else:
logger.setLevel(level)

for i in range(0,4):

for i in range(0, 4):
try:
sysLogh = SysLogHandler('/dev/log')
break
except Exception as e:
if i<3:
if i < 3:
time.sleep(1)
else:
raise e

str_format = '%(name)-12s: %(levelname)-8s %(funcName)s'+\
' %(lineno)s [%(process)d, %(threadName)s]'+\

str_format = '%(name)-12s: %(levelname)-8s %(funcName)s' + \
' %(lineno)s [%(process)d, %(threadName)s]' + \
' %(message)s'
formatter = logging.Formatter(str_format)
sysLogh.setFormatter(formatter)
@ -676,42 +694,46 @@ def start_logger(logger_name, log_level, container_id):
return logger

'''------------------------------------------------------------------------'''

def usage():
'''
@summary: usage
'''@summary: usage

Print the expected command line arguments.

@rtype: void
'''
print "daemon_factory <path> <log level> <container_id>"
print("daemon_factory <path> <log level> <container_id>")

'''------------------------------------------------------------------------'''

def main(argv):
'''
@summary: main
The entry point.
- Initialize logger,
'''@summary: main

The entry point.
- Initialize logger,
- impersonate to swift user,
- create an instance of daemon_factory,
- start the main loop.
- create an instance of daemon_factory,
- start the main loop.
'''

if (len(argv) != 3):
usage()
return

pipe_path = argv[0]
log_level = argv[1]
container_id = argv[2]
logger = start_logger("daemon_factory", log_level, container_id)
logger.debug("Daemon factory started")
SBus.start_logger("DEBUG", container_id=container_id)

# Impersonate the swift user
pw = pwd.getpwnam('swift')
os.setresgid(pw.pw_gid,pw.pw_gid,pw.pw_gid)
os.setresuid(pw.pw_uid,pw.pw_uid,pw.pw_uid)
os.setresgid(pw.pw_gid, pw.pw_gid, pw.pw_gid)
os.setresuid(pw.pw_uid, pw.pw_uid, pw.pw_uid)

factory = daemon_factory(pipe_path, logger)
factory.main_loop(container_id)

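# Illustrative example (assumed values, not part of this change): inside the
# container the factory is launched with the three arguments that usage()
# documents; with the module file name assumed, this could look like
#
#   python daemon_factory.py /mnt/channels/factory_pipe DEBUG <container id>
#
# where /mnt/channels is the sandbox-side pipe mount used by RunTimePaths.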
@ -14,8 +14,9 @@ See the License for the specific language governing permissions and
Limitations under the License.
-------------------------------------------------------------------------'''

from setuptools import setup, Extension
setup(name = 'storlet_daemon_factory',
version = '1.0',
package_dir={'storlet_daemon_factory':''},
from setuptools import setup

setup(name='storlet_daemon_factory',
version='1.0',
package_dir={'storlet_daemon_factory': ''},
packages=['storlet_daemon_factory'])
@ -13,10 +13,11 @@ See the License for the specific language governing permissions and
Limitations under the License.
-------------------------------------------------------------------------'''
from setuptools import setup
paste_factory = ['storlet_handler = storlet_middleware.storlet_handler:filter_factory']
paste_factory = ['storlet_handler = '
'storlet_middleware.storlet_handler:filter_factory']

setup(name='storlets',
version='1.0',
packages=['storlet_middleware','storlet_gateway'],
packages=['storlet_middleware', 'storlet_gateway'],
entry_points={'paste.filter_factory': paste_factory}
)
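# Illustrative note (not part of this change): the 'paste.filter_factory'
# entry point declared above is what lets a Swift proxy pipeline load the
# middleware by egg name, along the lines of:
#
#   [filter:storlet_handler]
#   use = egg:storlets#storlet_handler
#
# Options such as storlet_timeout, storlet_container and execution_server
# (seen below in StorletHandlerMiddleware) would presumably be supplied in
# that filter section.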
@ -1,3 +0,0 @@
@ -20,18 +20,20 @@ Created on Mar 24, 2015
'''

import os
import sys
import shutil
import select
from eventlet import Timeout
import shutil

from eventlet import Timeout
from storlet_middleware.storlet_common import StorletGatewayBase
from storlet_runtime import RunTimePaths
from storlet_runtime import RunTimeSandbox
from storlet_runtime import StorletInvocationGETProtocol
from storlet_runtime import StorletInvocationPUTProtocol
from storlet_runtime import StorletInvocationSLOProtocol
from swift.common.internal_client import InternalClient as ic
from swift.common.swob import Request
from storlet_runtime import RunTimeSandbox, RunTimePaths
from storlet_runtime import StorletInvocationGETProtocol,\
StorletInvocationPUTProtocol, StorletInvocationSLOProtocol
from swift.common.utils import config_true_value
from storlet_middleware.storlet_common import StorletGatewayBase

'''---------------------------------------------------------------------------
The Storlet Gateway API
@ -52,24 +54,25 @@ The API is made of:
---------------------------------------------------------------------------'''

class DockerStorletRequest():
'''
The StorletRequest class represents a request to be processed by the
class DockerStorletRequest(object):
'''The StorletRequest class represents a request to be processed by the

storlet. The request is derived from the Swift request and
essentially consists of:
1. A data stream to be processed
2. Metadata identifying the stream
'''

def user_metadata(self, headers):
metadata = {}
for key in headers:
if (key.startswith('X-Storlet') or
key.startswith('X-Object-Meta-Storlet')):
pass
pass
elif (key.startswith('X-Object-Meta-') or
key.startswith('X-Object-Meta-'.lower())):
short_key = key[len('X-Object-Meta-'):]
metadata[short_key] = headers[key]
short_key = key[len('X-Object-Meta-'):]
metadata[short_key] = headers[key]
return metadata

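# Illustrative example (not part of this change) of what user_metadata()
# returns for a typical set of request headers:
#
#   headers = {'X-Object-Meta-Color': 'blue',
#              'X-Object-Meta-Storlet-Main': 'skipped',
#              'X-Storlet-Range': 'skipped'}
#   user_metadata(headers)  ->  {'Color': 'blue'}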
def _getInitialRequest(self):
|
||||
@ -130,7 +133,7 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
|
||||
def read_with_timeout(self, size):
|
||||
timeout = Timeout(self.timeout)
|
||||
try:
|
||||
@ -148,10 +151,10 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
timeout.cancel()
|
||||
|
||||
return chunk
|
||||
|
||||
def next(self, size = 1024):
|
||||
|
||||
def next(self, size=1024):
|
||||
chunk = None
|
||||
r, w, e = select.select([ self.obj_data ], [], [ ], self.timeout)
|
||||
r, w, e = select.select([self.obj_data], [], [], self.timeout)
|
||||
if len(r) == 0:
|
||||
self.close()
|
||||
if self.obj_data in r:
|
||||
@ -161,25 +164,25 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
else:
|
||||
return chunk
|
||||
raise StopIteration('Stopped iterator ex')
|
||||
|
||||
|
||||
def read(self, size=1024):
|
||||
return self.next(size)
|
||||
|
||||
|
||||
def readline(self, size=-1):
|
||||
return ''
|
||||
|
||||
def readlines(self, sizehint=-1):
|
||||
pass;
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
if self.closed == True:
|
||||
if self.closed is True:
|
||||
return
|
||||
self.closed = True
|
||||
os.close(self.obj_data)
|
||||
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
||||
|
||||
def validateStorletUpload(self, req):
|
||||
|
||||
if (self.container == self.sconf['storlet_container']):
|
||||
@ -223,9 +226,9 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
self._clean_storlet_stuff_from_request(req.headers)
|
||||
req.headers.pop('X-Run-Storlet')
|
||||
|
||||
slog_path = self.\
|
||||
slog_path = self. \
|
||||
paths.slog_path(self.idata['storlet_main_class'])
|
||||
storlet_pipe_path = self.\
|
||||
storlet_pipe_path = self. \
|
||||
paths.host_storlet_pipe(self.idata['storlet_main_class'])
|
||||
|
||||
sprotocol = StorletInvocationPUTProtocol(sreq,
|
||||
@ -237,7 +240,9 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
self._set_metadata_in_headers(req.headers, out_md)
|
||||
self._upload_storlet_logs(slog_path)
|
||||
|
||||
return out_md, StorletGatewayDocker.IterLike(self.data_read_fd, self.storlet_timeout, sprotocol._cancel)
|
||||
return out_md, StorletGatewayDocker.IterLike(self.data_read_fd,
|
||||
self.storlet_timeout,
|
||||
sprotocol._cancel)
|
||||
|
||||
def gatewayProxyGETFlow(self, req, container, obj, orig_resp):
|
||||
# Flow for running the GET computation on the proxy
|
||||
@ -250,9 +255,9 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
docker_updated)
|
||||
self._add_system_params(req.params)
|
||||
|
||||
slog_path = self.\
|
||||
slog_path = self. \
|
||||
paths.slog_path(self.idata['storlet_main_class'])
|
||||
storlet_pipe_path = self.\
|
||||
storlet_pipe_path = self. \
|
||||
paths.host_storlet_pipe(self.idata['storlet_main_class'])
|
||||
|
||||
sprotocol = StorletInvocationSLOProtocol(sreq,
|
||||
@ -264,7 +269,9 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
self._set_metadata_in_headers(orig_resp.headers, out_md)
|
||||
self._upload_storlet_logs(slog_path)
|
||||
|
||||
return out_md, StorletGatewayDocker.IterLike(self.data_read_fd, self.storlet_timeout, sprotocol._cancel)
|
||||
return out_md, StorletGatewayDocker.IterLike(self.data_read_fd,
|
||||
self.storlet_timeout,
|
||||
sprotocol._cancel)
|
||||
|
||||
def gatewayObjectGetFlow(self, req, container, obj, orig_resp):
|
||||
sreq = StorletGETRequest(self.account, orig_resp, req.params)
|
||||
@ -276,9 +283,9 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
docker_updated)
|
||||
self._add_system_params(req.params)
|
||||
|
||||
slog_path = self.\
|
||||
slog_path = self. \
|
||||
paths.slog_path(self.idata['storlet_main_class'])
|
||||
storlet_pipe_path = self.paths.\
|
||||
storlet_pipe_path = self.paths. \
|
||||
host_storlet_pipe(self.idata['storlet_main_class'])
|
||||
|
||||
sprotocol = StorletInvocationGETProtocol(sreq, storlet_pipe_path,
|
||||
@ -290,7 +297,9 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
self._set_metadata_in_headers(orig_resp.headers, out_md)
|
||||
self._upload_storlet_logs(slog_path)
|
||||
|
||||
return out_md, StorletGatewayDocker.IterLike(self.data_read_fd, self.storlet_timeout, sprotocol._cancel)
|
||||
return out_md, StorletGatewayDocker.IterLike(self.data_read_fd,
|
||||
self.storlet_timeout,
|
||||
sprotocol._cancel)
|
||||
|
||||
def verify_access(self, env, version, account, container, object):
|
||||
self.logger.info('Verify access to {0}/{1}/{2}'.format(account,
|
||||
@ -343,34 +352,34 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
req.headers['X-Storlet-' + key] = val
|
||||
|
||||
def _add_system_params(self, params):
|
||||
'''
|
||||
Adds Storlet engine specific parameters to the invocation
|
||||
'''Adds Storlet engine specific parameters to the invocation
|
||||
|
||||
currently, this consists only of the execution path of the
|
||||
Storlet within the Docker container.
|
||||
'''
|
||||
params['storlet_execution_path'] = self.\
|
||||
params['storlet_execution_path'] = self. \
|
||||
paths.sbox_storlet_exec(self.idata['storlet_main_class'])
|
||||
|
||||
def _clean_storlet_stuff_from_request(self, headers):
|
||||
for key in headers:
|
||||
if (key.startswith('X-Storlet') or
|
||||
key.startswith('X-Object-Meta-Storlet')):
|
||||
del headers[key]
|
||||
del headers[key]
|
||||
return headers
|
||||
|
||||
def _get_storlet_invocation_data(self, req):
|
||||
data = dict()
|
||||
data['storlet_name'] = req.headers.get('X-Run-Storlet')
|
||||
data['generate_log'] = req.headers.get('X-Storlet-Generate-Log', False)
|
||||
data['storlet_original_timestamp'] = req.headers.\
|
||||
data['storlet_original_timestamp'] = req.headers. \
|
||||
get('X-Storlet-X-Timestamp')
|
||||
data['storlet_original_size'] = req.headers.\
|
||||
data['storlet_original_size'] = req.headers. \
|
||||
get('X-Storlet-Content-Length')
|
||||
data['storlet_md'] = {'storlet_original_timestamp':
|
||||
data['storlet_original_timestamp'],
|
||||
'storlet_original_size':
|
||||
data['storlet_original_size']}
|
||||
data['storlet_main_class'] = req.headers.\
|
||||
data['storlet_main_class'] = req.headers. \
|
||||
get('X-Object-Meta-Storlet-Main')
|
||||
|
||||
scope = self.account
|
||||
@ -378,7 +387,7 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
if data['scope'].rfind(':') > 0:
|
||||
data['scope'] = data['scope'][:data['scope'].rfind(':')]
|
||||
|
||||
data['storlet_dependency'] = req.headers.\
|
||||
data['storlet_dependency'] = req.headers. \
|
||||
get('X-Object-Meta-Storlet-Dependency')
|
||||
data['request_params'] = req.params
|
||||
return data
|
||||
@ -395,7 +404,7 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
try:
|
||||
headers = dict()
|
||||
headers['CONTENT_TYPE'] = 'text/html'
|
||||
log_obj_name = '%s.log' %\
|
||||
log_obj_name = '%s.log' % \
|
||||
self.idata['storlet_name'][:self.idata['storlet_name'].
|
||||
find('-')]
|
||||
client.upload_object(logfile, self.account,
|
||||
@ -405,8 +414,8 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
raise e
|
||||
|
||||
def bring_from_cache(self, obj_name, is_storlet):
|
||||
'''
|
||||
Auxiliary function that:
|
||||
'''Auxiliary function that:
|
||||
|
||||
(1) Brings from Swift obj_name, whether this is a
|
||||
storlet or a storlet dependency.
|
||||
(2) Copies from local cache into the Docker container
|
||||
@ -425,7 +434,7 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
swift_source_container = self.paths.storlet_container
|
||||
|
||||
if not os.path.exists(cache_dir):
|
||||
os.makedirs(cache_dir, 0755)
|
||||
os.makedirs(cache_dir, 0o755)
|
||||
|
||||
# cache_target_path is the actual object we need to deal with
|
||||
# e.g. a concrete storlet or dependency we need to bring/update
|
||||
@ -467,7 +476,7 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
fn.close()
|
||||
|
||||
if not is_storlet:
|
||||
expected_perm = resp.headers.\
|
||||
expected_perm = resp.headers. \
|
||||
get('X-Object-Meta-Storlet-Dependency-Permissions', '')
|
||||
if expected_perm != '':
|
||||
os.chmod(cache_target_path, int(expected_perm, 8))
|
||||
@ -479,12 +488,12 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
# 1. The Docker container does not hold a copy of the object
|
||||
# 2. The Docker container holds an older version of the object
|
||||
update_docker = False
|
||||
docker_storlet_path = self.paths.\
|
||||
docker_storlet_path = self.paths. \
|
||||
host_storlet(self.idata['storlet_main_class'])
|
||||
docker_target_path = os.path.join(docker_storlet_path, obj_name)
|
||||
|
||||
if not os.path.exists(docker_storlet_path):
|
||||
os.makedirs(docker_storlet_path, 0755)
|
||||
os.makedirs(docker_storlet_path, 0o755)
|
||||
update_docker = True
|
||||
elif not os.path.isfile(docker_target_path):
|
||||
update_docker = True
|
||||
@ -493,7 +502,7 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
fstat_docker_object = os.stat(docker_target_path)
|
||||
b_size_changed = fstat_cached_object.st_size \
|
||||
!= fstat_docker_object.st_size
|
||||
b_time_changed = float(fstat_cached_object.st_mtime) <\
|
||||
b_time_changed = float(fstat_cached_object.st_mtime) < \
|
||||
float(fstat_docker_object.st_mtime)
|
||||
if (b_size_changed or b_time_changed):
|
||||
update_docker = True
|
||||
@ -506,8 +515,8 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
return update_docker
|
||||
|
||||
def update_docker_container_from_cache(self):
|
||||
'''
|
||||
Iterates over the storlet name and its dependencies appearing
|
||||
'''Iterates over the storlet name and its dependencies appearing
|
||||
|
||||
in the invocation data and make sure they are brought to the
|
||||
local cache, and from there to the Docker container.
|
||||
Uses the bring_from_cache auxiliary function.
|
||||
@ -516,7 +525,7 @@ class StorletGatewayDocker(StorletGatewayBase):
|
||||
# where at the host side, reside the storlet containers
|
||||
storlet_path = self.paths.host_storlet_prefix()
|
||||
if not os.path.exists(storlet_path):
|
||||
os.makedirs(storlet_path, 0755)
|
||||
os.makedirs(storlet_path, 0o755)
|
||||
|
||||
# Iterate over storlet and dependencies, and make sure
|
||||
# they are updated within the Docker container.
|
||||
|
@ -20,54 +20,60 @@ Created on Feb 10, 2015
|
||||
'''
|
||||
|
||||
import os
|
||||
import time
|
||||
import stat
|
||||
import select
|
||||
import commands
|
||||
import stat
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
import eventlet
|
||||
from eventlet.timeout import Timeout
|
||||
import json
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from swift.common.constraints import MAX_META_OVERALL_SIZE
|
||||
from swift.common.swob import HTTPBadRequest, Request,\
|
||||
HTTPInternalServerError
|
||||
|
||||
from SBusPythonFacade.SBus import *
|
||||
from SBusPythonFacade.SBusDatagram import *
|
||||
from SBusPythonFacade.SBusStorletCommand import *
|
||||
from SBusPythonFacade.SBusFileDescription import *
|
||||
from SBusPythonFacade.SBus import SBus
|
||||
from SBusPythonFacade.SBusDatagram import SBusDatagram
|
||||
from SBusPythonFacade.SBusFileDescription import SBUS_FD_INPUT_OBJECT
|
||||
from SBusPythonFacade.SBusFileDescription import SBUS_FD_LOGGER
|
||||
from SBusPythonFacade.SBusFileDescription import SBUS_FD_OUTPUT_OBJECT
|
||||
from SBusPythonFacade.SBusFileDescription import SBUS_FD_OUTPUT_OBJECT_METADATA
|
||||
from SBusPythonFacade.SBusFileDescription import SBUS_FD_OUTPUT_TASK_ID
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_CANCEL
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_DAEMON_STATUS
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_EXECUTE
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_PING
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_START_DAEMON
|
||||
from SBusPythonFacade.SBusStorletCommand import SBUS_CMD_STOP_DAEMON
|
||||
from storlet_middleware.storlet_common import StorletLogger
|
||||
from swift.common.constraints import MAX_META_OVERALL_SIZE
|
||||
|
||||
eventlet.monkey_patch()
|
||||
|
||||
|
||||
'''---------------------------------------------------------------------------
|
||||
Sandbox API
|
||||
'''
|
||||
|
||||
class RunTimePaths():
|
||||
'''
|
||||
The Storlet Engine needs to access stuff located in many paths:
|
||||
1. The various communication channels represented as pipes in the filesystem
|
||||
|
||||
class RunTimePaths(object):
|
||||
'''The Storlet Engine needs to access stuff located in many paths:
|
||||
|
||||
1. The various communication channels represented as pipes in the
|
||||
filesystem
|
||||
2. Directories where to place Storlets
|
||||
3. Directories where to place logs
|
||||
|
||||
|
||||
Communication channels
|
||||
----------------------
|
||||
The RunTimeSandbox communicates with the Sandbox via two types of pipes
|
||||
1. factory pipe - defined per account, used for communication with the sandbox
|
||||
1. factory pipe - defined per account, used for communication with the
|
||||
sandbox
|
||||
for e.g. start/stop a storlet daemon
|
||||
2. Storlet pipe - defined per account and Storlet, used for communication
|
||||
with a storlet daemon, e.g. to call the invoke API
|
||||
|
||||
|
||||
Each pipe type has two paths:
|
||||
1. A path that is inside the sandbox
|
||||
2. A path that is outside of the sandbox or at the host side. As such
|
||||
this path is prefixed by 'host_'
|
||||
|
||||
|
||||
Thus, we have the following 4 paths of interest:
|
||||
1. sandbox_factory_pipe_path
|
||||
2. host_factory_pipe_path
|
||||
@ -75,7 +81,7 @@ class RunTimePaths():
|
||||
4. host_storlet_pipe_path
|
||||
|
||||
Our implementation uses the following path structure for the various pipes:
|
||||
In the host, all pipes belonging to a given account are prefixed by
|
||||
In the host, all pipes belonging to a given account are prefixed by
|
||||
<pipes_dir>/<account>, where <pipes_dir> comes from the configuration
|
||||
Thus:
|
||||
host_factory_pipe_path is of the form <pipes_dir>/<account>/factory_pipe
|
||||
@ -87,18 +93,20 @@ class RunTimePaths():
|
||||
|
||||
Storlets Locations
|
||||
------------------
|
||||
The Storlet binaries are accessible from the sandbox using a mounted directory.
|
||||
The Storlet binaries are accessible from the sandbox using a mounted
|
||||
directory.
|
||||
This directory is called the storlet directories.
|
||||
On the host side it is of the form <storlet_dir>/<account>/<storlet_name>
|
||||
On the sandbox side it is of the form /home/swift/<storlet_name>
|
||||
<storlet_dir> comes from the configuration
|
||||
<storlet_name> is the prefix of the jar.
|
||||
|
||||
|
||||
Logs
|
||||
----
|
||||
Logs are located in paths of the form:
|
||||
<log_dir>/<account>/<storlet_name>.log
|
||||
'''
|
||||
|
||||
def __init__(self, account, conf):
|
||||
self.account = account
|
||||
self.scope = account[5:18]
|
||||
@ -107,14 +115,13 @@ class RunTimePaths():
|
||||
self.factory_pipe_suffix = 'factory_pipe'
|
||||
self.sandbox_pipe_prefix = '/mnt/channels'
|
||||
self.storlet_pipe_suffix = '_storlet_pipe'
|
||||
self.sandbox_storlet_dir_prefix = '/home/swift'
|
||||
self.sandbox_storlet_dir_prefix = '/home/swift'
|
||||
self.host_storlet_root = conf['storlets_dir']
|
||||
self.host_log_path_root = conf['log_dir']
|
||||
self.host_cache_root = conf['cache_dir']
|
||||
self.storlet_container = conf['storlet_container']
|
||||
self.storlet_dependency = conf['storlet_dependency']
|
||||
|
||||
|
||||
def host_pipe_prefix(self):
|
||||
return os.path.join(self.host_pipe_root, self.scope)
|
||||
|
||||
@ -126,48 +133,53 @@ class RunTimePaths():
|
||||
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
|
||||
|
||||
def host_factory_pipe(self):
|
||||
return os.path.join(self.host_pipe_prefix(),
|
||||
return os.path.join(self.host_pipe_prefix(),
|
||||
self.factory_pipe_suffix)
|
||||
|
||||
|
||||
def host_storlet_pipe(self, storlet_id):
|
||||
return os.path.join(self.host_pipe_prefix(),
|
||||
storlet_id)
|
||||
|
||||
|
||||
def sbox_storlet_pipe(self, storlet_id):
|
||||
return os.path.join(self.sandbox_pipe_prefix,
|
||||
storlet_id)
|
||||
|
||||
|
||||
def sbox_storlet_exec(self, storlet_id):
|
||||
return os.path.join(self.sandbox_storlet_dir_prefix, storlet_id)
|
||||
|
||||
|
||||
def host_storlet_prefix(self):
|
||||
return os.path.join(self.host_storlet_root, self.scope)
|
||||
|
||||
|
||||
def host_storlet(self, storlet_id):
|
||||
return os.path.join(self.host_storlet_prefix(), storlet_id)
|
||||
|
||||
|
||||
def slog_path(self, storlet_id):
|
||||
log_dir = os.path.join(self.host_log_path_root, self.scope, storlet_id)
|
||||
if not os.path.exists(log_dir):
|
||||
os.makedirs(log_dir)
|
||||
return log_dir
|
||||
|
||||
|
||||
def get_host_storlet_cache_dir(self):
|
||||
return os.path.join(self.host_cache_root, self.scope,self.storlet_container)
|
||||
|
||||
return os.path.join(self.host_cache_root, self.scope,
|
||||
self.storlet_container)
|
||||
|
||||
def get_host_dependency_cache_dir(self):
|
||||
return os.path.join(self.host_cache_root, self.scope,self.storlet_dependency)
|
||||
return os.path.join(self.host_cache_root, self.scope,
|
||||
self.storlet_dependency)
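# Illustrative example (assumed values, not part of this change): for an
# account 'AUTH_0123456789abc' (so scope '0123456789abc') and a host pipe
# root of, say, /home/docker_device/pipes (the exact conf key is not shown
# in this hunk), the helpers above compose paths such as:
#
#   host_factory_pipe()                    -> <pipe_root>/0123456789abc/factory_pipe
#   host_storlet('MyStorlet-1.0.jar')      -> <storlets_dir>/0123456789abc/MyStorlet-1.0.jar
#   sbox_storlet_exec('MyStorlet-1.0.jar') -> /home/swift/MyStorlet-1.0.jar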
|
||||
|
||||
'''---------------------------------------------------------------------------
|
||||
Docker Stateful Container API
|
||||
The RunTimeSandbox serves as an API between the Docker Gateway and
|
||||
a re-usable per account sandbox
|
||||
---------------------------------------------------------------------------'''
|
||||
class RunTimeSandbox():
|
||||
'''
|
||||
The RunTimeSandbox represents a re-usable per account sandbox. The sandbox
|
||||
is re-usable in the sense that it can run several storlet daemons.
|
||||
|
||||
|
||||
|
||||
class RunTimeSandbox(object):
|
||||
'''The RunTimeSandbox represents a re-usable per account sandbox.
|
||||
|
||||
The sandbox is re-usable in the sense that it can run several storlet
|
||||
daemons.
|
||||
|
||||
The following methods are supported:
|
||||
ping - pings the sandbox for liveness
|
||||
wait - wait for the sandbox to be ready for processing commands
|
||||
@ -182,22 +194,24 @@ class RunTimeSandbox():
|
||||
self.account = account
|
||||
|
||||
self.sandbox_ping_interval = 0.5
|
||||
self.sandbox_wait_timeout = int(conf['restart_linux_container_timeout'])
|
||||
self.sandbox_wait_timeout = \
|
||||
int(conf['restart_linux_container_timeout'])
|
||||
|
||||
self.docker_repo = conf['docker_repo']
|
||||
self.docker_image_name_prefix = 'tenant'
|
||||
|
||||
# TODO: should come from upper layer Storlet metadata
|
||||
# TODO(should come from upper layer Storlet metadata)
|
||||
self.storlet_language = 'java'
|
||||
|
||||
# TODO: add line in conf
|
||||
self.storlet_daemon_thread_pool_size = int(conf.get('storlet_daemon_thread_pool_size',5))
|
||||
self.storlet_daemon_debug_level = conf.get('storlet_daemon_debug_level','TRACE')
|
||||
|
||||
# TODO: change logger's route if possible
|
||||
|
||||
# TODO(add line in conf)
|
||||
self.storlet_daemon_thread_pool_size = \
|
||||
int(conf.get('storlet_daemon_thread_pool_size', 5))
|
||||
self.storlet_daemon_debug_level = \
|
||||
conf.get('storlet_daemon_debug_level', 'TRACE')
|
||||
|
||||
# TODO(change logger's route if possible)
|
||||
self.logger = logger
|
||||
|
||||
|
||||
|
||||
def _parse_sandbox_factory_answer(self, str_answer):
|
||||
two_tokens = str_answer.split(':', 1)
|
||||
b_success = False
|
||||
@ -207,28 +221,28 @@ class RunTimeSandbox():
|
||||
|
||||
def ping(self):
|
||||
pipe_path = self.paths.host_factory_pipe()
|
||||
|
||||
|
||||
read_fd, write_fd = os.pipe()
|
||||
dtg = SBusDatagram.create_service_datagram( SBUS_CMD_PING, write_fd )
|
||||
rc = SBus.send( pipe_path, dtg )
|
||||
dtg = SBusDatagram.create_service_datagram(SBUS_CMD_PING, write_fd)
|
||||
rc = SBus.send(pipe_path, dtg)
|
||||
if (rc < 0):
|
||||
return -1
|
||||
|
||||
reply = os.read(read_fd,10)
|
||||
os.close(read_fd)
|
||||
|
||||
reply = os.read(read_fd, 10)
|
||||
os.close(read_fd)
|
||||
os.close(write_fd)
|
||||
|
||||
res, error_txt = self._parse_sandbox_factory_answer(reply)
|
||||
if res == True:
|
||||
if res is True:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
def wait(self):
|
||||
do_wait = True
|
||||
up = 0
|
||||
to = Timeout(self.sandbox_wait_timeout)
|
||||
try:
|
||||
while do_wait == True:
|
||||
while do_wait is True:
|
||||
rc = self.ping()
|
||||
if (rc != 1):
|
||||
time.sleep(self.sandbox_ping_interval)
|
||||
@ -237,18 +251,17 @@ class RunTimeSandbox():
|
||||
to.cancel()
|
||||
do_wait = False
|
||||
up = 1
|
||||
except Timeout as t:
|
||||
except Timeout:
|
||||
self.logger.info("wait for sandbox %s timedout" % self.account)
|
||||
do_wait = False
|
||||
finally:
|
||||
to.cancel()
|
||||
|
||||
return up
|
||||
|
||||
|
||||
def restart(self):
|
||||
'''
|
||||
Restarts the account's sandbox
|
||||
|
||||
'''Restarts the account's sandbox
|
||||
|
||||
Returned value:
|
||||
True - If the sandbox was started successfully
|
||||
False - Otherwise
|
||||
@ -259,109 +272,115 @@ class RunTimeSandbox():
|
||||
account_id = self.account[len('auth_'):]
|
||||
else:
|
||||
account_id = self.account
|
||||
|
||||
|
||||
self.paths.create_host_pipe_prefix()
|
||||
|
||||
|
||||
docker_container_name = '%s_%s' % (self.docker_image_name_prefix,
|
||||
account_id)
|
||||
docker_image_name = '%s/%s' % (self.docker_repo,account_id)
|
||||
pipe_mount = '%s:%s' % (self.paths.host_pipe_prefix(),
|
||||
account_id)
|
||||
docker_image_name = '%s/%s' % (self.docker_repo, account_id)
|
||||
pipe_mount = '%s:%s' % (self.paths.host_pipe_prefix(),
|
||||
self.paths.sandbox_pipe_prefix)
|
||||
|
||||
storlet_mount = '%s:%s' % (self.paths.host_storlet_prefix(),
|
||||
|
||||
storlet_mount = '%s:%s' % (self.paths.host_storlet_prefix(),
|
||||
self.paths.sandbox_storlet_dir_prefix)
|
||||
|
||||
cmd = '%s/restart_docker_container %s %s %s %s' % (
|
||||
self.paths.host_restart_script_dir,
|
||||
docker_container_name,
|
||||
docker_image_name,
|
||||
pipe_mount,
|
||||
storlet_mount)
|
||||
|
||||
res = commands.getoutput(cmd)
|
||||
|
||||
cmd = [self.paths.host_restart_script_dir +
|
||||
'/restart_docker_container',
|
||||
docker_container_name, docker_image_name, pipe_mount,
|
||||
storlet_mount]
|
||||
|
||||
subprocess.call(cmd)
|
||||
return self.wait()
|
||||
|
||||
def start_storlet_daemon(self, spath, storlet_id):
|
||||
prms = {}
|
||||
prms['daemon_language'] = 'java'
|
||||
prms['storlet_path'] = spath
|
||||
prms['storlet_name'] = storlet_id
|
||||
prms['uds_path'] = self.paths.sbox_storlet_pipe(storlet_id)
|
||||
prms['log_level'] = self.storlet_daemon_debug_level
|
||||
prms['pool_size'] = self.storlet_daemon_thread_pool_size
|
||||
|
||||
prms['storlet_path'] = spath
|
||||
prms['storlet_name'] = storlet_id
|
||||
prms['uds_path'] = self.paths.sbox_storlet_pipe(storlet_id)
|
||||
prms['log_level'] = self.storlet_daemon_debug_level
|
||||
prms['pool_size'] = self.storlet_daemon_thread_pool_size
|
||||
|
||||
read_fd, write_fd = os.pipe()
|
||||
dtg = SBusDatagram.create_service_datagram( SBUS_CMD_START_DAEMON,
|
||||
write_fd )
|
||||
dtg.set_exec_params( prms )
|
||||
|
||||
dtg = SBusDatagram.create_service_datagram(SBUS_CMD_START_DAEMON,
|
||||
write_fd)
|
||||
dtg.set_exec_params(prms)
|
||||
|
||||
pipe_path = self.paths.host_factory_pipe()
|
||||
rc = SBus.send( pipe_path, dtg )
|
||||
rc = SBus.send(pipe_path, dtg)
|
||||
if (rc < 0):
|
||||
return -1
|
||||
reply = os.read(read_fd,10)
|
||||
os.close(read_fd)
|
||||
reply = os.read(read_fd, 10)
|
||||
os.close(read_fd)
|
||||
os.close(write_fd)
|
||||
|
||||
res, error_txt = self._parse_sandbox_factory_answer(reply)
|
||||
if res == True:
|
||||
if res is True:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
def stop_storlet_daemon(self, storlet_id):
|
||||
read_fd, write_fd = os.pipe()
|
||||
dtg = SBusDatagram.create_service_datagram( SBUS_CMD_STOP_DAEMON,
|
||||
write_fd )
|
||||
dtg = SBusDatagram.create_service_datagram(SBUS_CMD_STOP_DAEMON,
|
||||
write_fd)
|
||||
dtg.add_exec_param('storlet_name', storlet_id)
|
||||
pipe_path = self.paths.host_factory_pipe()
|
||||
rc = SBus.send( pipe_path, dtg )
|
||||
rc = SBus.send(pipe_path, dtg)
|
||||
if (rc < 0):
|
||||
self.logger.info("Failed to send status command to %s %s" % (self.account, storlet_id))
|
||||
self.logger.info("Failed to send status command to %s %s" %
|
||||
(self.account, storlet_id))
|
||||
return -1
|
||||
|
||||
reply = os.read(read_fd,10)
|
||||
os.close(read_fd)
|
||||
|
||||
reply = os.read(read_fd, 10)
|
||||
os.close(read_fd)
|
||||
os.close(write_fd)
|
||||
|
||||
res, error_txt = self._parse_sandbox_factory_answer(reply)
|
||||
if res == True:
|
||||
if res is True:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def get_storlet_daemon_status(self, storlet_id):
|
||||
read_fd, write_fd = os.pipe()
|
||||
dtg = SBusDatagram.create_service_datagram( SBUS_CMD_DAEMON_STATUS,
|
||||
write_fd )
|
||||
dtg.add_exec_param( 'storlet_name', storlet_id)
|
||||
dtg = SBusDatagram.create_service_datagram(SBUS_CMD_DAEMON_STATUS,
|
||||
write_fd)
|
||||
dtg.add_exec_param('storlet_name', storlet_id)
|
||||
pipe_path = self.paths.host_factory_pipe()
|
||||
rc = SBus.send(pipe_path, dtg)
|
||||
if (rc < 0):
|
||||
self.logger.info("Failed to send status command to %s %s" % (self.account, storlet_id))
|
||||
self.logger.info("Failed to send status command to %s %s" %
|
||||
(self.account, storlet_id))
|
||||
return -1
|
||||
reply = os.read(read_fd,10)
|
||||
os.close(read_fd)
|
||||
reply = os.read(read_fd, 10)
|
||||
os.close(read_fd)
|
||||
os.close(write_fd)
|
||||
|
||||
res, error_txt = self._parse_sandbox_factory_answer(reply)
|
||||
if res == True:
|
||||
res, error_txt = self._parse_sandbox_factory_answer(reply)
|
||||
if res is True:
|
||||
return 1
|
||||
return 0
|
||||
|
||||
def activate_storlet_daemon(self, invocation_data, cache_updated = True):
|
||||
storlet_daemon_status = self.get_storlet_daemon_status(invocation_data['storlet_main_class'])
|
||||
def activate_storlet_daemon(self, invocation_data, cache_updated=True):
|
||||
storlet_daemon_status = \
|
||||
self.get_storlet_daemon_status(invocation_data[
|
||||
'storlet_main_class'])
|
||||
if (storlet_daemon_status == -1):
|
||||
# We failed to send a command to the factory.
|
||||
# Best we can do is execute the container.
|
||||
self.logger.debug('Failed to check Storlet daemon status, restart Docker container')
|
||||
self.logger.debug('Failed to check Storlet daemon status, '
|
||||
'restart Docker container')
|
||||
res = self.restart()
|
||||
if (res != 1):
|
||||
raise Exception('Docker container is not responsive')
|
||||
storlet_daemon_status = 0
|
||||
|
||||
if (cache_updated == True and storlet_daemon_status == 1):
|
||||
# The cache was updated while the daemon is running we need to stop it.
|
||||
self.logger.debug('The cache was updated, and the storlet daemon is running. Stopping daemon')
|
||||
res = self.stop_storlet_daemon( invocation_data['storlet_main_class'] )
|
||||
|
||||
if (cache_updated is True and storlet_daemon_status == 1):
|
||||
# The cache was updated while the daemon is running we need to
|
||||
# stop it.
|
||||
self.logger.debug('The cache was updated, and the storlet daemon '
|
||||
'is running. Stopping daemon')
|
||||
res = \
|
||||
self.stop_storlet_daemon(invocation_data['storlet_main_class'])
|
||||
if res != 1:
|
||||
res = self.restart()
|
||||
if (res != 1):
|
||||
@ -369,47 +388,53 @@ class RunTimeSandbox():
|
||||
else:
|
||||
self.logger.debug('Deamon stopped')
|
||||
storlet_daemon_status = 0
|
||||
|
||||
|
||||
if (storlet_daemon_status == 0):
|
||||
self.logger.debug('Going to start storlet daemon!')
|
||||
class_path = '/home/swift/%s/%s' % (invocation_data['storlet_main_class'],
|
||||
invocation_data['storlet_name'])
|
||||
class_path = \
|
||||
'/home/swift/%s/%s' % (invocation_data['storlet_main_class'],
|
||||
invocation_data['storlet_name'])
|
||||
for dep in invocation_data['storlet_dependency'].split(','):
|
||||
class_path = '%s:/home/swift/%s/%s' %\
|
||||
(class_path,
|
||||
invocation_data['storlet_main_class'],
|
||||
dep)
|
||||
|
||||
daemon_status = self.start_storlet_daemon(
|
||||
class_path,
|
||||
invocation_data['storlet_main_class'])
|
||||
class_path = '%s:/home/swift/%s/%s' % \
|
||||
(class_path,
|
||||
invocation_data['storlet_main_class'],
|
||||
dep)
|
||||
|
||||
daemon_status = \
|
||||
self.start_storlet_daemon(class_path,
|
||||
invocation_data[
|
||||
'storlet_main_class'])
|
||||
|
||||
if daemon_status != 1:
|
||||
self.logger.error('Daemon start Failed, returned code is %d' % daemon_status)
|
||||
self.logger.error('Daemon start Failed, returned code is %d' %
|
||||
daemon_status)
|
||||
raise Exception('Daemon start failed')
|
||||
else:
|
||||
self.logger.debug('Daemon started')
|
||||
|
||||
'''---------------------------------------------------------------------------
|
||||
Storlet Daemon API
|
||||
The StorletInvocationGETProtocol, StorletInvocationPUTProtocol, StorletInvocationSLOProtocol
|
||||
serve as an API between the Docker Gateway and the Storlet Daemon which
|
||||
The StorletInvocationGETProtocol, StorletInvocationPUTProtocol,
|
||||
StorletInvocationSLOProtocol
|
||||
serve as an API between the Docker Gateway and the Storlet Daemon which
|
||||
runs inside the Docker container. These classes implement the Storlet execution
|
||||
protocol
|
||||
---------------------------------------------------------------------------'''
|
||||
class StorletInvocationProtocol():
|
||||
|
||||
|
||||
class StorletInvocationProtocol(object):
|
||||
|
||||
def _add_input_stream(self, appendFd):
|
||||
#self.fds.append(self.srequest.stream
|
||||
# self.fds.append(self.srequest.stream
|
||||
self.fds.append(appendFd)
|
||||
# TODO: Break request metadata and systemmetadata
|
||||
# TODO(Break request metadata and systemmetadata)
|
||||
md = dict()
|
||||
md['type'] = SBUS_FD_INPUT_OBJECT
|
||||
if self.srequest.user_metadata is not None:
|
||||
for key, val in self.srequest.user_metadata.iteritems():
|
||||
md[key] = val
|
||||
self.fdmd.append(md)
|
||||
|
||||
|
||||
def _add_output_stream(self):
|
||||
self.fds.append(self.execution_str_write_fd)
|
||||
md = dict()
|
||||
@ -425,26 +450,26 @@ class StorletInvocationProtocol():
|
||||
md = dict()
|
||||
md['type'] = SBUS_FD_OUTPUT_OBJECT_METADATA
|
||||
self.fdmd.append(md)
|
||||
|
||||
|
||||
def _add_logger_stream(self):
|
||||
self.fds.append(self.storlet_logger.getfd())
|
||||
md = dict()
|
||||
md['type'] = SBUS_FD_LOGGER
|
||||
self.fdmd.append(md)
|
||||
|
||||
|
||||
def _prepare_invocation_descriptors(self):
|
||||
# Add the input stream
|
||||
self._add_input_stream()
|
||||
|
||||
# Add the output stream
|
||||
# Add the output stream
|
||||
self.data_read_fd, self.data_write_fd = os.pipe()
|
||||
self.execution_str_read_fd, self.execution_str_write_fd = os.pipe()
|
||||
self.metadata_read_fd, self.metadata_write_fd = os.pipe()
|
||||
self._add_output_stream()
|
||||
|
||||
|
||||
# Add the logger
|
||||
self._add_logger_stream()
|
||||
|
||||
|
||||
def _close_remote_side_descriptors(self):
|
||||
if self.data_write_fd:
|
||||
os.close(self.data_write_fd)
|
||||
@ -455,42 +480,42 @@ class StorletInvocationProtocol():
|
||||
|
||||
def _cancel(self):
|
||||
read_fd, write_fd = os.pipe()
|
||||
dtg = SBusDatagram.create_service_datagram( SBUS_CMD_CANCEL, write_fd )
|
||||
dtg = SBusDatagram.create_service_datagram(SBUS_CMD_CANCEL, write_fd)
|
||||
dtg.set_task_id(self.task_id)
|
||||
rc = SBus.send( self.storlet_pipe_path, dtg )
|
||||
rc = SBus.send(self.storlet_pipe_path, dtg)
|
||||
if (rc < 0):
|
||||
return -1
|
||||
|
||||
reply = os.read(read_fd,10)
|
||||
os.read(read_fd, 10)
|
||||
os.close(read_fd)
|
||||
os.close(write_fd)
|
||||
|
||||
|
||||
def _invoke(self):
|
||||
dtg = SBusDatagram()
|
||||
dtg.set_files( self.fds )
|
||||
dtg.set_metadata( self.fdmd )
|
||||
dtg.set_exec_params( self.srequest.params )
|
||||
dtg = SBusDatagram()
|
||||
dtg.set_files(self.fds)
|
||||
dtg.set_metadata(self.fdmd)
|
||||
dtg.set_exec_params(self.srequest.params)
|
||||
dtg.set_command(SBUS_CMD_EXECUTE)
|
||||
rc = SBus.send( self.storlet_pipe_path, dtg )
|
||||
|
||||
rc = SBus.send(self.storlet_pipe_path, dtg)
|
||||
|
||||
if (rc < 0):
|
||||
raise Exception("Failed to send execute command")
|
||||
|
||||
self._wait_for_read_with_timeout(self.execution_str_read_fd)
|
||||
self.task_id = os.read(self.execution_str_read_fd, 10)
|
||||
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout):
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path,
|
||||
timeout):
|
||||
self.srequest = srequest
|
||||
self.storlet_pipe_path = storlet_pipe_path
|
||||
self.storlet_logger_path = storlet_logger_path
|
||||
self.timeout = timeout
|
||||
|
||||
|
||||
# remote side file descriptors and their metadata lists
|
||||
# to be sent as part of invocation
|
||||
self.fds = list()
|
||||
self.fdmd = list()
|
||||
|
||||
|
||||
# local side file descriptors
|
||||
self.data_read_fd = None
|
||||
self.data_write_fd = None
|
||||
@ -499,38 +524,42 @@ class StorletInvocationProtocol():
|
||||
self.execution_str_read_fd = None
|
||||
self.execution_str_write_fd = None
|
||||
self.task_id = None
|
||||
|
||||
|
||||
if not os.path.exists(storlet_logger_path):
|
||||
os.makedirs(storlet_logger_path)
|
||||
|
||||
def _wait_for_read_with_timeout(self, fd):
|
||||
r, w, e = select.select([ fd ], [], [ ], self.timeout)
|
||||
r, w, e = select.select([fd], [], [], self.timeout)
|
||||
if len(r) == 0:
|
||||
if self.task_id:
|
||||
self._cancel()
|
||||
raise Timeout('Timeout while waiting for storlet output')
|
||||
if fd in r:
|
||||
return
|
||||
|
||||
|
||||
def _read_metadata(self):
|
||||
self._wait_for_read_with_timeout(self.metadata_read_fd)
|
||||
flat_json = os.read(self.metadata_read_fd, MAX_META_OVERALL_SIZE)
|
||||
if flat_json is not None:
|
||||
md = json.loads(flat_json)
|
||||
return md
|
||||
|
||||
|
||||
|
||||
class StorletInvocationGETProtocol(StorletInvocationProtocol):
|
||||
|
||||
|
||||
def _add_input_stream(self):
|
||||
StorletInvocationProtocol._add_input_stream(self, self.srequest.stream)
|
||||
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout):
|
||||
StorletInvocationProtocol.__init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout)
|
||||
|
||||
def communicate(self):
|
||||
self.storlet_logger = StorletLogger(self.storlet_logger_path, 'storlet_invoke')
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path,
|
||||
timeout):
|
||||
StorletInvocationProtocol.__init__(self, srequest, storlet_pipe_path,
|
||||
storlet_logger_path, timeout)
|
||||
|
||||
def communicate(self):
|
||||
self.storlet_logger = StorletLogger(self.storlet_logger_path,
|
||||
'storlet_invoke')
|
||||
self.storlet_logger.open()
|
||||
|
||||
|
||||
self._prepare_invocation_descriptors()
|
||||
try:
|
||||
self._invoke()
|
||||
@ -539,33 +568,37 @@ class StorletInvocationGETProtocol(StorletInvocationProtocol):
|
||||
finally:
|
||||
self._close_remote_side_descriptors()
|
||||
self.storlet_logger.close()
|
||||
|
||||
|
||||
out_md = self._read_metadata()
|
||||
os.close(self.metadata_read_fd)
|
||||
os.close(self.metadata_read_fd)
|
||||
self._wait_for_read_with_timeout(self.data_read_fd)
|
||||
os.close(self.execution_str_read_fd)
|
||||
|
||||
os.close(self.execution_str_read_fd)
|
||||
|
||||
return out_md, self.data_read_fd
|
||||
|
||||
|
||||
class StorletInvocationProxyProtocol(StorletInvocationProtocol):
|
||||
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout):
|
||||
StorletInvocationProtocol.__init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout)
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path,
|
||||
timeout):
|
||||
StorletInvocationProtocol.__init__(self, srequest, storlet_pipe_path,
|
||||
storlet_logger_path, timeout)
|
||||
self.input_data_read_fd, self.input_data_write_fd = os.pipe()
|
||||
# YM this pipe permits to take data from srequest.stream to input_data_write_fd
|
||||
# YM this pipe permits to take data from srequest.stream to
|
||||
# input_data_write_fd
|
||||
# YM the write side stays with us, the read side is sent to storlet
|
||||
|
||||
|
||||
def _add_input_stream(self):
|
||||
StorletInvocationProtocol._add_input_stream(self, self.input_data_read_fd)
|
||||
StorletInvocationProtocol._add_input_stream(self,
|
||||
self.input_data_read_fd)
|
||||
|
||||
def _wait_for_write_with_timeout(self,fd):
|
||||
r, w, e = select.select([ ], [ fd ], [ ], self.timeout)
|
||||
def _wait_for_write_with_timeout(self, fd):
|
||||
r, w, e = select.select([], [fd], [], self.timeout)
|
||||
if len(w) == 0:
|
||||
raise Timeout('Timeout while waiting for storlet to read')
|
||||
if fd in w:
|
||||
return
|
||||
|
||||
|
||||
def _write_with_timeout(self, writer, chunk):
|
||||
timeout = Timeout(self.timeout)
|
||||
try:
|
||||
@ -580,9 +613,10 @@ class StorletInvocationProxyProtocol(StorletInvocationProtocol):
|
||||
timeout.cancel()
|
||||
|
||||
def communicate(self):
|
||||
self.storlet_logger = StorletLogger(self.storlet_logger_path, 'storlet_invoke')
|
||||
self.storlet_logger = StorletLogger(self.storlet_logger_path,
|
||||
'storlet_invoke')
|
||||
self.storlet_logger.open()
|
||||
|
||||
|
||||
self._prepare_invocation_descriptors()
|
||||
try:
|
||||
self._invoke()
|
||||
@ -591,7 +625,7 @@ class StorletInvocationProxyProtocol(StorletInvocationProtocol):
|
||||
finally:
|
||||
self._close_remote_side_descriptors()
|
||||
self.storlet_logger.close()
|
||||
|
||||
|
||||
self._wait_for_write_with_timeout(self.input_data_write_fd)
|
||||
# We do the writing in a different thread.
|
||||
# Otherwise, we can run into the following deadlock
|
||||
@ -599,19 +633,23 @@ class StorletInvocationProxyProtocol(StorletInvocationProtocol):
|
||||
# 2. Storlet reads and starts to write metadata and then data
|
||||
# 3. middleware continues writing
|
||||
# 4. Storlet continues writing and gets stuck as middleware
|
||||
# is busy writing, but still not consuming the reader end
|
||||
# is busy writing, but still not consuming the reader end
|
||||
# of the Storlet writer.
|
||||
eventlet.spawn_n(self._write_input_data)
|
||||
out_md = self._read_metadata()
|
||||
self._wait_for_read_with_timeout(self.data_read_fd)
|
||||
|
||||
|
||||
return out_md, self.data_read_fd
|
||||
|
||||
|
||||
|
||||
class StorletInvocationPUTProtocol(StorletInvocationProxyProtocol):
|
||||
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout):
|
||||
StorletInvocationProxyProtocol.__init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout)
|
||||
|
||||
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path,
|
||||
timeout):
|
||||
StorletInvocationProxyProtocol.__init__(self, srequest,
|
||||
storlet_pipe_path,
|
||||
storlet_logger_path, timeout)
|
||||
|
||||
def _write_input_data(self):
|
||||
writer = os.fdopen(self.input_data_write_fd, 'w')
|
||||
reader = self.srequest.stream
|
||||
@ -621,10 +659,13 @@ class StorletInvocationPUTProtocol(StorletInvocationProxyProtocol):
|
||||
|
||||
|
||||
class StorletInvocationSLOProtocol(StorletInvocationProxyProtocol):
|
||||
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout):
|
||||
StorletInvocationProxyProtocol.__init__(self, srequest, storlet_pipe_path, storlet_logger_path, timeout)
|
||||
|
||||
|
||||
def __init__(self, srequest, storlet_pipe_path, storlet_logger_path,
|
||||
timeout):
|
||||
StorletInvocationProxyProtocol.__init__(self, srequest,
|
||||
storlet_pipe_path,
|
||||
storlet_logger_path, timeout)
|
||||
|
||||
def _write_input_data(self):
|
||||
writer = os.fdopen(self.input_data_write_fd, 'w')
|
||||
reader = self.srequest.stream
|
||||
@ -633,4 +674,3 @@ class StorletInvocationSLOProtocol(StorletInvocationProxyProtocol):
|
||||
self._write_with_timeout(writer, chunk)
|
||||
# print >> sys.stderr, 'next SLO chunk...%d'% len(chunk)
|
||||
writer.close()
|
||||
|
||||
|
@ -1,4 +1,20 @@
|
||||
class StorletStubBase():
|
||||
'''-------------------------------------------------------------------------
|
||||
Copyright IBM Corp. 2015, 2015 All Rights Reserved
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
class StorletStubBase(object):
|
||||
|
||||
def __init__(self, storlet_conf, logger, app, version, account,
|
||||
container, obj):
|
||||
|
@ -1,67 +1,66 @@
|
||||
#-----------------------------------------------------------------------------------------------
|
||||
# Copyright IBM Corp. 2015, 2015 All Rights Reserved
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# Limitations under the License.
|
||||
#-----------------------------------------------------------------------------------------------
|
||||
'''-------------------------------------------------------------------------
|
||||
Copyright IBM Corp. 2015, 2015 All Rights Reserved
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
'''
|
||||
Created on Feb 18, 2014
|
||||
|
||||
@author: gilv
|
||||
'''
|
||||
from eventlet.timeout import Timeout
|
||||
import traceback
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
|
||||
import select
|
||||
|
||||
class StorletTimeout(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class StorletLogger(object):
|
||||
def __init__(self, path, name):
|
||||
self.full_path = os.path.join(path,'%s.log' % name)
|
||||
|
||||
self.full_path = os.path.join(path, '%s.log' % name)
|
||||
|
||||
def open(self):
|
||||
self.file = open(self.full_path,'a')
|
||||
|
||||
self.file = open(self.full_path, 'a')
|
||||
|
||||
def getfd(self):
|
||||
return self.file.fileno()
|
||||
|
||||
|
||||
def getsize(self):
|
||||
statinfo = os.stat(self.full_path)
|
||||
return statinfo.st_size
|
||||
|
||||
|
||||
def close(self):
|
||||
self.file.close()
|
||||
|
||||
|
||||
def fobj(self):
|
||||
return open(self.full_path, 'r')
|
||||
|
||||
|
||||
class StorletException(object):
|
||||
|
||||
### Print details about the code line which caused the exception
|
||||
# Print details about the code line which caused the exception
|
||||
@staticmethod
|
||||
def handle( logger, exc ):
|
||||
logger.info('-'*60)
|
||||
def handle(logger, exc):
|
||||
logger.info('-' * 60)
|
||||
logger.info(exc)
|
||||
### logging.exception()
|
||||
# logging.exception()
|
||||
traceback.print_exc(file=sys.stdout)
|
||||
logger.info('-'*60)
|
||||
logger.info('-' * 60)
|
||||
|
||||
|
||||
class StorletGatewayBase():
|
||||
class StorletGatewayBase(object):
|
||||
|
||||
def validateStorletUpload(self, request):
|
||||
raise NotImplementedError("Not implemented: validateStorletUpload")
|
||||
@ -81,6 +80,7 @@ class StorletGatewayBase():
|
||||
def gatewayObjectGetFlow(self, request, container, obj, original_response):
|
||||
raise NotImplementedError("Not implemented: gatewayObjectGetFlow")
|
||||
|
||||
|
||||
class StorletStubGateway(StorletGatewayBase):
|
||||
|
||||
def __init__(self, sconf, logger, app, version, account, container,
|
||||
@ -93,10 +93,10 @@ class StorletStubGateway(StorletGatewayBase):
|
||||
self.obj = obj
|
||||
self.sconf = sconf
|
||||
self.dummy_stream = os.pipe()
|
||||
self.dummy_content = sconf.get('dummy_content','aaaa')
|
||||
self.dummy_content = sconf.get('dummy_content', 'aaaa')
|
||||
|
||||
def dummy_invocation(self):
|
||||
os.write(self.dummy_stream[1],self.dummy_content)
|
||||
os.write(self.dummy_stream[1], self.dummy_content)
|
||||
os.close(self.dummy_stream[1])
|
||||
return self.dummy_stream[0], {}
|
||||
|
||||
|
@ -19,34 +19,38 @@ Created on Feb 18, 2014
|
||||
@author: Gil Vernik
|
||||
'''
|
||||
|
||||
from storlet_common import StorletTimeout,StorletException
|
||||
|
||||
from swift.common.utils import get_logger, register_swift_info, is_success, config_true_value
|
||||
from swift.common.swob import Request, Response, wsgify, \
|
||||
HTTPBadRequest, HTTPUnauthorized, \
|
||||
HTTPInternalServerError
|
||||
from swift.proxy.controllers.base import get_account_info
|
||||
from swift.common.exceptions import ConnectionTimeout
|
||||
from eventlet import Timeout
|
||||
|
||||
import ConfigParser
|
||||
import os
|
||||
import sys
|
||||
from eventlet import Timeout
|
||||
from storlet_common import StorletException
|
||||
from storlet_common import StorletTimeout
|
||||
from swift.common.exceptions import ConnectionTimeout
|
||||
from swift.common.swob import HTTPBadRequest
|
||||
from swift.common.swob import HTTPInternalServerError
|
||||
from swift.common.swob import HTTPUnauthorized
|
||||
from swift.common.swob import Request
|
||||
from swift.common.swob import Response
|
||||
from swift.common.swob import wsgify
|
||||
from swift.common.utils import config_true_value
|
||||
from swift.common.utils import get_logger
|
||||
from swift.common.utils import is_success
|
||||
from swift.common.utils import register_swift_info
|
||||
from swift.proxy.controllers.base import get_account_info
|
||||
|
||||
|
||||
class StorletHandlerMiddleware(object):
|
||||
|
||||
|
||||
def __init__(self, app, conf, storlet_conf):
|
||||
self.app = app
|
||||
self.logger = get_logger(conf, log_route='storlet_handler')
|
||||
self.stimeout = int(storlet_conf.get('storlet_timeout'))
|
||||
self.storlet_containers = [ storlet_conf.get('storlet_container'),
|
||||
self.storlet_containers = [storlet_conf.get('storlet_container'),
|
||||
storlet_conf.get('storlet_dependency')]
|
||||
self.execution_server = storlet_conf.get('execution_server')
|
||||
self.gateway_module = storlet_conf['gateway_module']
|
||||
self.proxy_only_storlet_execution = storlet_conf['storlet_execute_on_proxy_only']
|
||||
self.proxy_only_storlet_execution = \
|
||||
storlet_conf['storlet_execute_on_proxy_only']
|
||||
self.gateway_conf = storlet_conf
|
||||
|
||||
|
||||
@wsgify
|
||||
def __call__(self, req):
|
||||
try:
|
||||
@ -59,25 +63,22 @@ class StorletHandlerMiddleware(object):
|
||||
version = '0'
|
||||
except Exception as e:
|
||||
return req.get_response(self.app)
|
||||
|
||||
|
||||
self.logger.debug('storlet_handler call in %s: with %s/%s/%s' %
|
||||
(self.execution_server,
|
||||
account,
|
||||
container,
|
||||
obj))
|
||||
(self.execution_server, account, container, obj))
|
||||
|
||||
storlet_execution = False
|
||||
if 'X-Run-Storlet' in req.headers:
|
||||
storlet_execution = True
|
||||
if (storlet_execution == True and account and container and obj) or \
|
||||
(container in self.storlet_containers and obj):
|
||||
gateway = self.gateway_module(self.gateway_conf,
|
||||
self.logger, self.app, version, account,
|
||||
container, obj)
|
||||
if (storlet_execution is True and account and container and obj) or \
|
||||
(container in self.storlet_containers and obj):
|
||||
gateway = self.gateway_module(self.gateway_conf,
|
||||
self.logger, self.app, version,
|
||||
account, container, obj)
|
||||
else:
|
||||
return req.get_response(self.app)
|
||||
return req.get_response(self.app)
|
||||
|
||||
try:
|
||||
try:
|
||||
if self.execution_server == 'object' and storlet_execution:
|
||||
if req.method == 'GET':
|
||||
self.logger.info('GET. Run storlet')
|
||||
@ -86,41 +87,38 @@ class StorletHandlerMiddleware(object):
|
||||
if not is_success(orig_resp.status_int):
|
||||
return orig_resp
|
||||
|
||||
if self._is_range_request(req) == True or \
|
||||
self._is_slo_get_request(req, orig_resp, account, \
|
||||
container, obj) or \
|
||||
self.proxy_only_storlet_execution == True:
|
||||
if self._is_range_request(req) is True or \
|
||||
self._is_slo_get_request(req, orig_resp, account,
|
||||
container, obj) or \
|
||||
self.proxy_only_storlet_execution is True:
|
||||
# For SLOs, and proxy only mode
# Storlets are executed on the proxy
# Therefore we return the object part without
# Storlet invocation:
|
||||
self.logger.info(
|
||||
'storlet_handler: invocation over %s/%s/%s %s' %
|
||||
(account, container, obj,
|
||||
'to be executed on proxy'))
|
||||
self.logger.info('storlet_handler: invocation '
|
||||
'over %s/%s/%s %s' %
|
||||
(account, container, obj,
|
||||
'to be executed on proxy'))
|
||||
return orig_resp
|
||||
else:
|
||||
else:
|
||||
# We apply here the Storlet:
|
||||
self.logger.info(
|
||||
'storlet_handler: invocation over %s/%s/%s %s' %
|
||||
(account, container, obj,
|
||||
'to be executed locally'))
|
||||
self.logger.info('storlet_handler: invocation '
|
||||
'over %s/%s/%s %s' %
|
||||
(account, container, obj,
|
||||
'to be executed locally'))
|
||||
old_env = req.environ.copy()
|
||||
orig_req = Request.blank(old_env['PATH_INFO'], old_env)
|
||||
(out_md, app_iter) = gateway.gatewayObjectGetFlow(req,
|
||||
container,
|
||||
obj,
|
||||
orig_resp)
|
||||
(out_md, app_iter) = \
|
||||
gateway.gatewayObjectGetFlow(req, container,
|
||||
obj, orig_resp)
|
||||
if 'Content-Length' in orig_resp.headers:
|
||||
orig_resp.headers.pop('Content-Length')
|
||||
if 'Transfer-Encoding' in orig_resp.headers:
|
||||
orig_resp.headers.pop('Transfer-Encoding')
|
||||
|
||||
return Response(
|
||||
app_iter,
|
||||
headers = orig_resp.headers,
|
||||
request=orig_req,
|
||||
conditional_response=True)
|
||||
|
||||
return Response(app_iter, headers=orig_resp.headers,
|
||||
request=orig_req,
|
||||
conditional_response=True)
|
||||
|
||||
elif (self.execution_server == 'proxy'):
|
||||
if (storlet_execution or container in self.storlet_containers):
|
||||
@ -137,7 +135,7 @@ class StorletHandlerMiddleware(object):
|
||||
return HTTPUnauthorized('Storlet: no permission')
|
||||
|
||||
# The get request may be an SLO object GET request.
# Simplest solution would be to invoke a HEAD
# for every GET request to test if we are in SLO case.
# In order to save the HEAD overhead we implemented
# a slightly more involved flow:
@ -146,69 +144,69 @@
# At object side, we invoke the plain (non Storlet)
# request and test if we are in SLO case,
# and invoke Storlet only if non SLO case.
# Back at proxy side, we test if we received the
# full object to detect if we are in SLO case,
# and invoke Storlet only if in SLO case.
|
||||
gateway.augmentStorletRequest(req)
|
||||
original_resp = req.get_response(self.app)
|
||||
|
||||
if self._is_range_request(req) == True or \
|
||||
self._is_slo_get_request(req, original_resp, account, \
|
||||
container, obj) or \
|
||||
self.proxy_only_storlet_execution == True:
|
||||
# SLO / proxy only case:
|
||||
if self._is_range_request(req) is True or \
|
||||
self._is_slo_get_request(req, original_resp,
|
||||
account,
|
||||
container, obj) or \
|
||||
self.proxy_only_storlet_execution is True:
|
||||
# SLO / proxy only case:
|
||||
# storlet to be invoked now at proxy side:
|
||||
(out_md, app_iter) = gateway.gatewayProxyGETFlow(req,
|
||||
container,
|
||||
obj,
|
||||
original_resp)
|
||||
(out_md, app_iter) = \
|
||||
gateway.gatewayProxyGETFlow(req, container, obj,
|
||||
original_resp)
|
||||
|
||||
# adapted from non SLO GET flow
|
||||
if is_success(original_resp.status_int):
|
||||
old_env = req.environ.copy()
|
||||
orig_req = Request.blank(old_env['PATH_INFO'], old_env)
|
||||
orig_req = Request.blank(old_env['PATH_INFO'],
|
||||
old_env)
|
||||
resp_headers = original_resp.headers
|
||||
|
||||
|
||||
resp_headers['Content-Length'] = None
|
||||
|
||||
return Response(
|
||||
app_iter=app_iter,
|
||||
headers=resp_headers,
|
||||
request=orig_req,
|
||||
conditional_response=True)
|
||||
return Response(app_iter=app_iter,
|
||||
headers=resp_headers,
|
||||
request=orig_req,
|
||||
conditional_response=True)
|
||||
return original_resp
|
||||
|
||||
else:
|
||||
# Non proxy GET case: Storlet was already invoked at object side
|
||||
# Non proxy GET case: Storlet was already invoked at
|
||||
# object side
|
||||
if 'Transfer-Encoding' in original_resp.headers:
|
||||
original_resp.headers.pop('Transfer-Encoding')
|
||||
|
||||
|
||||
if is_success(original_resp.status_int):
|
||||
old_env = req.environ.copy()
|
||||
orig_req = Request.blank(old_env['PATH_INFO'], old_env)
|
||||
orig_req = Request.blank(old_env['PATH_INFO'],
|
||||
old_env)
|
||||
resp_headers = original_resp.headers
|
||||
|
||||
|
||||
resp_headers['Content-Length'] = None
|
||||
return Response(
|
||||
app_iter=original_resp.app_iter,
|
||||
headers=resp_headers,
|
||||
request=orig_req,
|
||||
conditional_response=True)
|
||||
return Response(app_iter=original_resp.app_iter,
|
||||
headers=resp_headers,
|
||||
request=orig_req,
|
||||
conditional_response=True)
|
||||
return original_resp
|
||||
|
||||
elif req.method == 'PUT':
|
||||
if (container in self.storlet_containers):
|
||||
ret = gateway.validateStorletUpload(req)
|
||||
if ret:
|
||||
return HTTPBadRequest(body = ret)
|
||||
return HTTPBadRequest(body=ret)
|
||||
else:
|
||||
if not gateway.authorizeStorletExecution(req):
|
||||
return HTTPUnauthorized('Storlet: no permissions')
|
||||
if storlet_execution:
|
||||
gateway.augmentStorletRequest(req)
|
||||
(out_md, app_iter) = gateway.gatewayProxyPutFlow(req,
|
||||
container,
|
||||
obj)
|
||||
(out_md, app_iter) = \
|
||||
gateway.gatewayProxyPutFlow(req, container, obj)
|
||||
req.environ['wsgi.input'] = app_iter
|
||||
if 'CONTENT_LENGTH' in req.environ:
|
||||
req.environ.pop('CONTENT_LENGTH')
|
||||
@ -216,7 +214,7 @@ class StorletHandlerMiddleware(object):
|
||||
return req.get_response(self.app)
|
||||
|
||||
except (StorletTimeout, ConnectionTimeout, Timeout) as e:
|
||||
StorletException.handle(self.logger, e)
|
||||
StorletException.handle(self.logger, e)
|
||||
return HTTPInternalServerError(body='Storlet execution timed out')
|
||||
except Exception as e:
|
||||
StorletException.handle(self.logger, e)
|
||||
@ -229,14 +227,15 @@ class StorletHandlerMiddleware(object):
|
||||
args:
|
||||
req: the request
|
||||
'''
|
||||
|
||||
def _is_range_request(self, req):
|
||||
if 'Range' in req.headers:
|
||||
return True
|
||||
return False
|
||||
|
||||
'''
|
||||
Determines from a GET request and its associated response
|
||||
if the object is a SLO
|
||||
Determines from a GET request and its associated response
|
||||
if the object is a SLO
|
||||
args:
|
||||
req: the request
|
||||
resp: the response
|
||||
@ -244,39 +243,50 @@ class StorletHandlerMiddleware(object):
|
||||
container: the container as extracted from req
obj: the object as extracted from req
|
||||
'''
|
||||
|
||||
def _is_slo_get_request(self, req, resp, account, container, obj):
|
||||
if req.method != 'GET':
|
||||
return False
|
||||
return False
|
||||
if req.params.get('multipart-manifest') == 'get':
|
||||
return False
|
||||
|
||||
self.logger.info( 'Verify if {0}/{1}/{2} is an SLO assembly object'.format(account,container, obj))
|
||||
self.logger.info('Verify if {0}/{1}/{2} is an SLO assembly object'.
|
||||
format(account, container, obj))
|
||||
|
||||
if resp.status_int < 300 and resp.status_int >= 200 :
|
||||
if resp.status_int < 300 and resp.status_int >= 200:
|
||||
for key in resp.headers:
|
||||
if (key.lower() == 'x-static-large-object' and
|
||||
config_true_value(resp.headers[key])):
|
||||
self.logger.info( '{0}/{1}/{2} is indeed an SLO assembly object'.format(account,container, obj))
|
||||
if (key.lower() == 'x-static-large-object'
|
||||
and config_true_value(resp.headers[key])):
|
||||
self.logger.info('{0}/{1}/{2} is indeed an SLO assembly '
|
||||
'object'.format(account, container, obj))
|
||||
return True
|
||||
self.logger.info( '{0}/{1}/{2} is NOT an SLO assembly object'.format(account,container, obj))
|
||||
self.logger.info('{0}/{1}/{2} is NOT an SLO assembly object'.
|
||||
format(account, container, obj))
|
||||
return False
|
||||
self.logger.error( 'Failed to check if {0}/{1}/{2} is an SLO assembly object. Got status {3}'.format(account,container, obj,resp.status))
|
||||
raise Exception('Failed to check if {0}/{1}/{2} is an SLO assembly object. Got status {3}'.format(account,container, obj,resp.status))
|
||||
self.logger.error('Failed to check if {0}/{1}/{2} is an SLO assembly '
|
||||
'object. Got status {3}'.
|
||||
format(account, container, obj, resp.status))
|
||||
raise Exception('Failed to check if {0}/{1}/{2} is an SLO assembly '
|
||||
'object. Got status {3}'.format(account, container,
|
||||
obj, resp.status))


def filter_factory(global_conf, **local_conf):

conf = global_conf.copy()
conf.update(local_conf)
storlet_conf = dict()
storlet_conf['storlet_timeout'] = conf.get('storlet_timeout',40)
storlet_conf['storlet_container'] = conf.get('storlet_container','storlet')
storlet_conf['storlet_timeout'] = conf.get('storlet_timeout', 40)
storlet_conf['storlet_container'] = \
conf.get('storlet_container', 'storlet')
storlet_conf['storlet_dependency'] = conf.get('storlet_dependency',
'dependency')
storlet_conf['execution_server'] = conf.get('execution_server', '')
storlet_conf['storlet_execute_on_proxy_only'] = config_true_value(conf.get('storlet_execute_on_proxy_only', 'false'))
storlet_conf['storlet_execute_on_proxy_only'] = \
config_true_value(conf.get('storlet_execute_on_proxy_only', 'false'))
storlet_conf['gateway_conf'] = {}

module_name = conf.get('storlet_gateway_module','')
module_name = conf.get('storlet_gateway_module', '')
mo = module_name[:module_name.rfind(':')]
cl = module_name[module_name.rfind(':') + 1:]
module = __import__(mo, fromlist=[cl])
@ -288,8 +298,8 @@ def filter_factory(global_conf, **local_conf):

additional_items = configParser.items("DEFAULT")
for key, val in additional_items:
storlet_conf[key]= val

storlet_conf[key] = val

swift_info = {}
storlet_conf["gateway_module"] = the_class
register_swift_info('storlet_handler', False, **swift_info)
@ -297,4 +307,3 @@ def filter_factory(global_conf, **local_conf):
def storlet_handler_filter(app):
return StorletHandlerMiddleware(app, conf, storlet_conf)
return storlet_handler_filter
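filter_factory() above collects the storlet_* options from the paste configuration, resolves the gateway class named by storlet_gateway_module (given in 'module:Class' form and split on the last colon), and folds the DEFAULT section of a separately parsed configuration into storlet_conf before wrapping the app. A small self-contained sketch of that option handling, with placeholder values rather than a real deployment's settings:

# Placeholder option values; a real deployment supplies these through the
# paste configuration that filter_factory() above receives.
conf = {
    'storlet_timeout': '40',
    'storlet_container': 'storlet',
    'storlet_dependency': 'dependency',
    'execution_server': 'proxy',
    'storlet_execute_on_proxy_only': 'false',
    'storlet_gateway_module': 'some.gateway.module:SomeGatewayClass',
}

storlet_conf = {
    'storlet_timeout': conf.get('storlet_timeout', 40),
    'storlet_container': conf.get('storlet_container', 'storlet'),
    'storlet_dependency': conf.get('storlet_dependency', 'dependency'),
    'execution_server': conf.get('execution_server', ''),
}

# The gateway class name is split on the last ':' into module and class,
# exactly as in filter_factory(); the module path here is hypothetical.
module_name = conf.get('storlet_gateway_module', '')
mo = module_name[:module_name.rfind(':')]
cl = module_name[module_name.rfind(':') + 1:]
assert (mo, cl) == ('some.gateway.module', 'SomeGatewayClass')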
4
HACKING.rst
Normal file
4
HACKING.rst
Normal file
@ -0,0 +1,4 @@
storlets Style Commandments
===============================================

Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/
28
LICENSE
28
LICENSE
@ -1,3 +1,4 @@

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
@ -173,30 +174,3 @@
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright {yyyy} {name of copyright owner}

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
6
MANIFEST.in
Normal file
6
MANIFEST.in
Normal file
@ -0,0 +1,6 @@
include AUTHORS
include ChangeLog
exclude .gitignore
exclude .gitreview

global-exclude *.pyc
19
README.rst
Normal file
19
README.rst
Normal file
@ -0,0 +1,19 @@
===============================
storlets
===============================

Middleware and Compute Engine for an OpenStack Swift compute framework that runs compute within a Swift cluster

Please fill here a long description which must be at least 3 lines wrapped on
80 cols, so that distribution package maintainers can use it in their packages.
Note that this is a hard requirement.

* Free software: Apache license
* Documentation: http://docs.openstack.org/developer/storlets
* Source: http://git.openstack.org/cgit/openstack/storlets
* Bugs: http://bugs.launchpad.net/storlets

Features
--------

* TODO
@ -17,106 +17,121 @@ Limitations under the License.
|
||||
@author: gilv / cdoron / evgenyl
|
||||
'''
|
||||
|
||||
from sys_test_params import *
|
||||
from storlets_test_utils import put_storlet_object
|
||||
from swiftclient import client as c
|
||||
from sys_test_params import ACCOUNT
|
||||
from sys_test_params import AUTH_IP
|
||||
from sys_test_params import AUTH_PORT
|
||||
from sys_test_params import PASSWORD
|
||||
from sys_test_params import USER_NAME
|
||||
|
||||
from storlets_test_utils import put_storlet_containers, put_storlet_object
|
||||
|
||||
EXECDEP_PATH_TO_BUNDLE ='../StorletSamples/ExecDepStorlet/bin/'
|
||||
EXECDEP_STORLET_NAME='execdepstorlet-1.0.jar'
|
||||
EXECDEP_STORLET_LOG_NAME='execdepstorlet-1.0.log'
|
||||
EXECDEP_PATH_TO_BUNDLE = '../StorletSamples/ExecDepStorlet/bin/'
|
||||
EXECDEP_STORLET_NAME = 'execdepstorlet-1.0.jar'
|
||||
EXECDEP_STORLET_LOG_NAME = 'execdepstorlet-1.0.log'
|
||||
EXECDEP_JUNK_FILE = 'junk.txt'
|
||||
EXECDEP_DEPS_NAMES=['get42']
|
||||
EXECDEP_DEPS_NAMES = ['get42']
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def put_storlet_dependency(url, token, dependency_name, local_path_to_dependency):
|
||||
|
||||
|
||||
def put_storlet_dependency(url, token, dependency_name,
|
||||
local_path_to_dependency):
|
||||
metadata = {'X-Object-Meta-Storlet-Dependency-Version': '1'}
|
||||
|
||||
f = open('%s/%s'% (local_path_to_dependency, dependency_name), 'r')
|
||||
f = open('%s/%s' % (local_path_to_dependency, dependency_name), 'r')
|
||||
content_length = None
|
||||
response = dict()
|
||||
c.put_object(url, token, 'dependency', dependency_name, f,
|
||||
content_length, None, None, "application/octet-stream", metadata, None, None, None, response)
|
||||
content_length, None, None, "application/octet-stream",
|
||||
metadata, None, None, None, response)
|
||||
f.close()
|
||||
status = response.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_storlet_input_object(url, token):
|
||||
resp = dict()
|
||||
f = open('%s/%s' %(EXECDEP_PATH_TO_BUNDLE, EXECDEP_JUNK_FILE),'r')
|
||||
c.put_object(url, token, 'myobjects', EXECDEP_JUNK_FILE, f,
|
||||
content_type = "application/octet-stream",
|
||||
response_dict = resp)
|
||||
f = open('%s/%s' % (EXECDEP_PATH_TO_BUNDLE, EXECDEP_JUNK_FILE), 'r')
|
||||
c.put_object(url, token, 'myobjects', EXECDEP_JUNK_FILE, f,
|
||||
content_type="application/octet-stream",
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
status = resp.get('status')
|
||||
status = resp.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def deploy_storlet(url,token, name, jarName):
|
||||
#No need to create containers every time
|
||||
#put_storlet_containers(url, token)
|
||||
|
||||
|
||||
def deploy_storlet(url, token, name, jarName):
|
||||
# No need to create containers every time
|
||||
# put_storlet_containers(url, token)
|
||||
put_storlet_object(url, token, jarName,
|
||||
'../StorletSamples/'+ name + '/bin/',
|
||||
'',
|
||||
'com.ibm.storlet.' + name.lower() + '.' + name)
|
||||
|
||||
'../StorletSamples/' + name + '/bin/',
|
||||
'',
|
||||
'com.ibm.storlet.' + name.lower() + '.' + name)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def invoke_storlet(url, token, storletName, jarName, objectName, mode):
|
||||
resp = dict()
|
||||
if mode == 'PUT':
|
||||
f = open('../StorletSamples/' + storletName + '/sampleData.txt','r')
|
||||
f = open('../StorletSamples/' + storletName + '/sampleData.txt', 'r')
|
||||
c.put_object(url, token, 'myobjects', objectName, f,
|
||||
headers = {'X-Run-Storlet':jarName},
|
||||
response_dict = resp)
|
||||
headers={'X-Run-Storlet': jarName},
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
if mode == 'GET':
|
||||
resp_headers, saved_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
objectName,
|
||||
headers = {'X-Run-Storlet':jarName},
|
||||
response_dict=resp)
|
||||
|
||||
resp_headers, saved_content = \
|
||||
c.get_object(url, token, 'myobjects', objectName,
|
||||
headers={'X-Run-Storlet': jarName},
|
||||
response_dict=resp)
|
||||
|
||||
assert (resp['status'] == 200 or resp['status'] == 201)
|
||||
|
||||
|
||||
if mode == 'GET':
|
||||
return resp_headers, saved_content
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main():
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth( 'http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options = os_options,
|
||||
auth_version = '2.0' )
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth('http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options=os_options,
|
||||
auth_version='2.0')
|
||||
|
||||
print 'Deploying ReadHeaders storlet'
|
||||
deploy_storlet(url, token, 'ReadHeadersStorlet',
|
||||
'readheadersstorlet-1.0.jar')
|
||||
print('Deploying ReadHeaders storlet')
|
||||
deploy_storlet(url, token, 'ReadHeadersStorlet',
|
||||
'readheadersstorlet-1.0.jar')
|
||||
|
||||
print 'Deploying ReadHeaders dependency'
|
||||
put_storlet_dependency(url, token, 'json-simple-1.1.1.jar',
|
||||
'../StorletSamples/ReadHeadersStorlet/lib')
|
||||
print('Deploying ReadHeaders dependency')
|
||||
put_storlet_dependency(url, token, 'json-simple-1.1.1.jar',
|
||||
'../StorletSamples/ReadHeadersStorlet/lib')
|
||||
|
||||
print 'Deploying CSS storlet'
|
||||
deploy_storlet(url, token, 'CssStorlet', 'cssstorlet-1.0.jar')
|
||||
print('Deploying CSS storlet')
|
||||
deploy_storlet(url, token, 'CssStorlet', 'cssstorlet-1.0.jar')
|
||||
|
||||
print "Invoking CSS storlet in PUT mode"
|
||||
invoke_storlet(url, token, 'CssStorlet', 'cssstorlet-1.0.jar',
|
||||
'testDataCss', 'PUT')
|
||||
print("Invoking CSS storlet in PUT mode")
|
||||
invoke_storlet(url, token, 'CssStorlet', 'cssstorlet-1.0.jar',
|
||||
'testDataCss', 'PUT')
|
||||
|
||||
print "Invoking ReadHeaders storlet in GET mode"
|
||||
headers, content = invoke_storlet(url, token, 'ReadHeadersStorlet',
|
||||
'readheadersstorlet-1.0.jar', 'testDataCss', 'GET')
|
||||
print("Invoking ReadHeaders storlet in GET mode")
|
||||
headers, content = invoke_storlet(url, token, 'ReadHeadersStorlet',
|
||||
'readheadersstorlet-1.0.jar',
|
||||
'testDataCss', 'GET')
|
||||
|
||||
assert '{"Square-Sums":"[2770444.6455999985, 1.9458262030000027E7,' \
|
||||
+ ' 95.17999999999981]","Lines-Num":"356","Sums":"[27037.0' \
|
||||
+ '40000000008, 83229.09999999998, 168.39999999999947]"}'\
|
||||
== content
|
||||
|
||||
print "ReadHeaders test finished"
|
||||
assert '{"Square-Sums":"[2770444.6455999985, 1.9458262030000027E7,' \
|
||||
+ ' 95.17999999999981]","Lines-Num":"356","Sums":"[27037.0' \
|
||||
+ '40000000008, 83229.09999999998, 168.39999999999947]"}' \
|
||||
== content
|
||||
|
||||
print("ReadHeaders test finished")
|
||||
'''------------------------------------------------------------------------'''
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -1,152 +1,183 @@
|
||||
import os
|
||||
import sys
|
||||
'''-------------------------------------------------------------------------
|
||||
Copyright IBM Corp. 2015, 2015 All Rights Reserved
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
from sys_test_params import *
|
||||
from swiftclient import client as c
|
||||
from sys_test_params import ACCOUNT
|
||||
from sys_test_params import AUTH_IP
|
||||
from sys_test_params import AUTH_PORT
|
||||
from sys_test_params import PASSWORD
|
||||
from sys_test_params import USER_NAME
|
||||
|
||||
from storlets_test_utils import put_storlet_containers, put_storlet_object, progress, progress_ln, progress_msg
|
||||
from identity_storlet_test import IDENTITY_STORLET_NAME
|
||||
from storlets_test_utils import progress
|
||||
from storlets_test_utils import progress_ln
|
||||
from storlets_test_utils import progress_msg
|
||||
from storlets_test_utils import put_storlet_object
|
||||
|
||||
SLOIDENTITY_PATH_TO_BUNDLE ='../StorletSamples/SLOIdentityStorlet/bin'
|
||||
SLOIDENTITY_STORLET_NAME='sloidentitystorlet-1.0.jar'
|
||||
SLOIDENTITY_PATH_TO_BUNDLE = '../StorletSamples/SLOIdentityStorlet/bin'
|
||||
SLOIDENTITY_STORLET_NAME = 'sloidentitystorlet-1.0.jar'
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
# Test Constants
|
||||
#PATH_TO_BUNDLE =
|
||||
#STORLET_NAME =
|
||||
#STORLET_LOG_NAME =
|
||||
#SOURCE_FILE =
|
||||
# PATH_TO_BUNDLE =
|
||||
# STORLET_NAME =
|
||||
# STORLET_LOG_NAME =
|
||||
# SOURCE_FILE =
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
|
||||
def create_local_chunks():
|
||||
for i in range(1,10):
|
||||
for i in range(1, 10):
|
||||
progress()
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
f = open(oname,'w')
|
||||
f.write(''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(1048576)))
|
||||
f = open(oname, 'w')
|
||||
f.write(''.join(random.choice(string.ascii_uppercase + string.digits)
|
||||
for _ in range(1048576)))
|
||||
f.close()
|
||||
progress_ln()
|
||||
|
||||
|
||||
def delete_local_chunks():
|
||||
for i in range(1,10):
|
||||
for i in range(1, 10):
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
os.remove(oname)
|
||||
|
||||
|
||||
|
||||
def put_SLO(url, token):
|
||||
# Create temp files
|
||||
assembly = []
|
||||
for i in range(1,10):
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
f = open(oname,'r')
|
||||
for i in range(1, 10):
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
f = open(oname, 'r')
|
||||
content_length = None
|
||||
response = dict()
|
||||
progress()
|
||||
c.put_object(url, token, 'myobjects', oname, f,
|
||||
content_length, None, None, "application/octet-stream",
|
||||
None, None, None, None, response)
|
||||
c.put_object(url, token, 'myobjects', oname, f,
|
||||
content_length, None, None, "application/octet-stream",
|
||||
None, None, None, None, response)
|
||||
f.close()
|
||||
status = response.get('status')
|
||||
assert (status >= 200 and status < 300)
|
||||
|
||||
|
||||
headers = response.get('headers')
|
||||
segment = dict()
|
||||
segment['path'] = 'myobjects/%s' % oname
|
||||
segment['size_bytes'] = 1048576
|
||||
segment['etag'] = headers['etag']
|
||||
assembly.append(segment)
|
||||
|
||||
|
||||
content_length = None
|
||||
response = dict()
|
||||
headers = {'x-object-meta-prop1' : 'val1'}
|
||||
headers = {'x-object-meta-prop1': 'val1'}
|
||||
progress()
|
||||
c.put_object(url, token, 'myobjects', 'assembly', json.dumps(assembly),
|
||||
content_length=None, etag=None, chunk_size=None, headers=headers,
|
||||
query_string='multipart-manifest=put', response_dict=response)
|
||||
c.put_object(url, token, 'myobjects', 'assembly', json.dumps(assembly),
|
||||
content_length=None, etag=None, chunk_size=None,
|
||||
headers=headers, query_string='multipart-manifest=put',
|
||||
response_dict=response)
|
||||
status = response.get('status')
|
||||
assert (status >= 200 and status < 300)
|
||||
progress_ln()
|
||||
|
||||
|
||||
def get_SLO(url, token):
|
||||
response = dict()
|
||||
headers, body = c.get_object(url, token, 'myobjects', 'assembly', http_conn=None,
|
||||
resp_chunk_size=1048576, query_string=None, response_dict=response, headers=None)
|
||||
headers, body = c.get_object(url, token, 'myobjects', 'assembly',
|
||||
http_conn=None, resp_chunk_size=1048576,
|
||||
query_string=None, response_dict=response,
|
||||
headers=None)
|
||||
|
||||
i = 1
|
||||
for chunk in body:
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
f = open(oname,'r')
|
||||
file_content=f.read()
|
||||
#print '%s %s' % (chunk[:10], file_content[:10])
|
||||
#print '%d %d' % (len(chunk), len(file_content))
|
||||
f = open(oname, 'r')
|
||||
file_content = f.read()
|
||||
# print '%s %s' % (chunk[:10], file_content[:10])
|
||||
# print '%d %d' % (len(chunk), len(file_content))
|
||||
progress()
|
||||
assert(chunk == file_content)
|
||||
f.close()
|
||||
i=i+1
|
||||
i = i + 1
|
||||
progress_ln()
|
||||
|
||||
|
||||
def compare_slo_to_chunks(body):
|
||||
i = 1
|
||||
for chunk in body:
|
||||
if chunk:
|
||||
if i<10:
|
||||
if i < 10:
|
||||
progress()
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
f = open(oname,'r')
|
||||
file_content=f.read()
|
||||
#print '%s %s' % (chunk[:10], file_content[:10])
|
||||
#print '%d %d' % (len(chunk), len(file_content))
|
||||
f = open(oname, 'r')
|
||||
file_content = f.read()
|
||||
# print '%s %s' % (chunk[:10], file_content[:10])
|
||||
# print '%d %d' % (len(chunk), len(file_content))
|
||||
assert(chunk == file_content)
|
||||
f.close()
|
||||
i=i+1
|
||||
i = i + 1
|
||||
else:
|
||||
aux_content = ''
|
||||
for j in range(1,4):
|
||||
for j in range(1, 4):
|
||||
oname = '/tmp/aux_file%d' % j
|
||||
f = open(oname,'r')
|
||||
aux_content+=f.read()
|
||||
f = open(oname, 'r')
|
||||
aux_content += f.read()
|
||||
f.close()
|
||||
assert(chunk == aux_content)
|
||||
progress_ln()
|
||||
|
||||
|
||||
|
||||
def invoke_identity_on_get_SLO(url, token):
|
||||
metadata = {'X-Run-Storlet': IDENTITY_STORLET_NAME }
|
||||
metadata = {'X-Run-Storlet': IDENTITY_STORLET_NAME}
|
||||
response = dict()
|
||||
headers, body = c.get_object(url, token,
|
||||
'myobjects','assembly',
|
||||
query_string = None,
|
||||
'myobjects', 'assembly',
|
||||
query_string=None,
|
||||
response_dict=response,
|
||||
resp_chunk_size=1048576,
|
||||
headers=metadata)
|
||||
compare_slo_to_chunks(body)
|
||||
|
||||
|
||||
def invoke_identity_on_get_SLO_double(url, token):
|
||||
metadata = {'X-Run-Storlet': IDENTITY_STORLET_NAME }
|
||||
metadata = {'X-Run-Storlet': IDENTITY_STORLET_NAME}
|
||||
response = dict()
|
||||
headers, body = c.get_object(url, token,
|
||||
'myobjects',
|
||||
'assembly',
|
||||
query_string = 'double=true',
|
||||
query_string='double=true',
|
||||
response_dict=response,
|
||||
resp_chunk_size=2048,
|
||||
headers=metadata)
|
||||
|
||||
|
||||
i = 1
|
||||
progress()
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
f = open(oname,'r')
|
||||
file_content=f.read()
|
||||
|
||||
j = 0 # Count chunks in file 1...1024
|
||||
f = open(oname, 'r')
|
||||
file_content = f.read()
|
||||
|
||||
j = 0 # Count chunks in file 1...1024
|
||||
for chunk in body:
|
||||
file_fragment = file_content[j*1024:(j+1)*1024]
|
||||
file_fragment = file_content[j * 1024:(j + 1) * 1024]
|
||||
chunk_framgment_low = chunk[0:1024]
|
||||
chunk_framgment_high = chunk[1024:2048]
|
||||
assert(chunk_framgment_low == file_fragment)
|
||||
assert(chunk_framgment_high == file_fragment)
|
||||
j = j +1
|
||||
j = j + 1
|
||||
if j == 1024:
|
||||
i = i + 1
|
||||
if i == 10:
|
||||
@ -154,21 +185,22 @@ def invoke_identity_on_get_SLO_double(url, token):
|
||||
f.close()
|
||||
progress()
|
||||
oname = '/tmp/slo_chunk_%d' % i
|
||||
f = open(oname,'r')
|
||||
file_content=f.read()
|
||||
f = open(oname, 'r')
|
||||
file_content = f.read()
|
||||
j = 0
|
||||
assert i == 10
|
||||
progress_ln()
|
||||
|
||||
|
||||
def invoke_identity_on_partial_get_SLO(url, token):
|
||||
metadata = {'X-Run-Storlet': IDENTITY_STORLET_NAME }
|
||||
metadata = {'X-Run-Storlet': IDENTITY_STORLET_NAME}
|
||||
for i in range(5):
|
||||
progress()
|
||||
response = dict()
|
||||
headers, body = c.get_object(url, token,
|
||||
'myobjects',
|
||||
'assembly',
|
||||
query_string = None,
|
||||
query_string=None,
|
||||
response_dict=response,
|
||||
resp_chunk_size=1048576,
|
||||
headers=metadata)
|
||||
@ -179,43 +211,46 @@ def invoke_identity_on_partial_get_SLO(url, token):
|
||||
if j == 5:
|
||||
break
|
||||
progress_ln()
|
||||
|
||||
|
||||
# def delete_files():
|
||||
# for i in range(1,4):
|
||||
# fname = '/tmp/aux_file%d' % i
|
||||
# os.remove(fname)
|
||||
|
||||
|
||||
|
||||
def create_container(url, token, name):
|
||||
response = dict()
|
||||
c.put_container(url, token, name, headers=None, response_dict = response)
|
||||
status = response.get('status')
|
||||
c.put_container(url, token, name, headers=None, response_dict=response)
|
||||
status = response.get('status')
|
||||
assert (status >= 200 or status < 300)
|
||||
|
||||
|
||||
def deploy_sloidentity_storlet(url, token):
|
||||
progress()
|
||||
response = dict()
|
||||
c.put_container(url, token, 'mysloobject', None, None, response)
|
||||
status = response.get('status')
|
||||
status = response.get('status')
|
||||
assert (status >= 200 or status < 300)
|
||||
|
||||
progress()
|
||||
put_storlet_object( url, token,
|
||||
SLOIDENTITY_STORLET_NAME,
|
||||
SLOIDENTITY_PATH_TO_BUNDLE,
|
||||
'',
|
||||
'com.ibm.storlet.sloidentity.SLOIdentityStorlet')
|
||||
put_storlet_object(url, token,
|
||||
SLOIDENTITY_STORLET_NAME,
|
||||
SLOIDENTITY_PATH_TO_BUNDLE,
|
||||
'',
|
||||
'com.ibm.storlet.sloidentity.SLOIdentityStorlet')
|
||||
progress_ln()
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main():
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth( 'http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options = os_options,
|
||||
auth_version = '2.0' )
|
||||
url, token = c.get_auth('http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options=os_options,
|
||||
auth_version='2.0')
|
||||
# print('Creating containers for auxiliary files')
|
||||
create_container(url, token, 'myobjects')
|
||||
create_container(url, token, 'container1')
|
||||
@ -234,8 +269,8 @@ def main():
|
||||
progress_msg("Invoking storlet on SLO in GET with double")
|
||||
invoke_identity_on_get_SLO_double(url, token)
|
||||
|
||||
#progress_msg("Invoking storlet on SLO in partial GET")
|
||||
#invoke_identity_on_partial_get_SLO(url, token)
|
||||
# progress_msg("Invoking storlet on SLO in partial GET")
|
||||
# invoke_identity_on_partial_get_SLO(url, token)
|
||||
delete_local_chunks()
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
@ -17,80 +17,90 @@ Limitations under the License.
|
||||
@author: gilv / cdoron / evgenyl
|
||||
'''
|
||||
|
||||
from sys_test_params import *
|
||||
from storlets_test_utils import put_file_as_storlet_input_object
|
||||
from storlets_test_utils import put_storlet_object
|
||||
from swiftclient import client as c
|
||||
from sys_test_params import ACCOUNT
|
||||
from sys_test_params import AUTH_IP
|
||||
from sys_test_params import AUTH_PORT
|
||||
from sys_test_params import PASSWORD
|
||||
from sys_test_params import USER_NAME
|
||||
|
||||
from storlets_test_utils import put_storlet_object, \
|
||||
put_file_as_storlet_input_object
|
||||
|
||||
EXECDEP_PATH_TO_BUNDLE ='../StorletSamples/ExecDepStorlet/bin/'
|
||||
EXECDEP_STORLET_NAME='execdepstorlet-1.0.jar'
|
||||
EXECDEP_STORLET_LOG_NAME='execdepstorlet-1.0.log'
|
||||
EXECDEP_PATH_TO_BUNDLE = '../StorletSamples/ExecDepStorlet/bin/'
|
||||
EXECDEP_STORLET_NAME = 'execdepstorlet-1.0.jar'
|
||||
EXECDEP_STORLET_LOG_NAME = 'execdepstorlet-1.0.log'
|
||||
EXECDEP_JUNK_FILE = 'junk.txt'
|
||||
EXECDEP_DEPS_NAMES=['get42']
|
||||
EXECDEP_DEPS_NAMES = ['get42']
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_storlet_executable_dependencies(url, token):
|
||||
resp = dict()
|
||||
for d in EXECDEP_DEPS_NAMES:
|
||||
for d in EXECDEP_DEPS_NAMES:
|
||||
metadata = {'X-Object-Meta-Storlet-Dependency-Version': '1',
|
||||
'X-Object-Meta-Storlet-Dependency-Permissions': '0755' }
|
||||
|
||||
f = open('%s/%s' %(EXECDEP_PATH_TO_BUNDLE, d),'r')
|
||||
c.put_object(url, token, 'dependency', d, f,
|
||||
content_type = "application/octet-stream",
|
||||
headers = metadata,
|
||||
response_dict = resp)
|
||||
'X-Object-Meta-Storlet-Dependency-Permissions': '0755'}
|
||||
|
||||
f = open('%s/%s' % (EXECDEP_PATH_TO_BUNDLE, d), 'r')
|
||||
c.put_object(url, token, 'dependency', d, f,
|
||||
content_type="application/octet-stream",
|
||||
headers=metadata,
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
status = resp.get('status')
|
||||
status = resp.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def deploy_storlet(url,token):
|
||||
#No need to create containers every time
|
||||
#put_storlet_containers(url, token)
|
||||
put_storlet_object( url, token,
|
||||
EXECDEP_STORLET_NAME,
|
||||
EXECDEP_PATH_TO_BUNDLE,
|
||||
','.join( str(x) for x in EXECDEP_DEPS_NAMES),
|
||||
'com.ibm.storlet.execdep.ExecDepStorlet')
|
||||
|
||||
|
||||
def deploy_storlet(url, token):
|
||||
# No need to create containers every time
|
||||
# put_storlet_containers(url, token)
|
||||
put_storlet_object(url, token,
|
||||
EXECDEP_STORLET_NAME,
|
||||
EXECDEP_PATH_TO_BUNDLE,
|
||||
','.join(str(x) for x in EXECDEP_DEPS_NAMES),
|
||||
'com.ibm.storlet.execdep.ExecDepStorlet')
|
||||
put_storlet_executable_dependencies(url, token)
|
||||
put_file_as_storlet_input_object(url,
|
||||
token,
|
||||
EXECDEP_PATH_TO_BUNDLE,
|
||||
EXECDEP_JUNK_FILE )
|
||||
|
||||
put_file_as_storlet_input_object(url,
|
||||
token,
|
||||
EXECDEP_PATH_TO_BUNDLE,
|
||||
EXECDEP_JUNK_FILE)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def invoke_storlet(url, token):
|
||||
metadata = {'X-Run-Storlet': EXECDEP_STORLET_NAME }
|
||||
metadata = {'X-Run-Storlet': EXECDEP_STORLET_NAME}
|
||||
resp = dict()
|
||||
resp_headers, gf = c.get_object(url, token,
|
||||
'myobjects',
|
||||
EXECDEP_JUNK_FILE,
|
||||
response_dict=resp,
|
||||
headers=metadata)
|
||||
|
||||
resp_headers, gf = c.get_object(url, token,
|
||||
'myobjects',
|
||||
EXECDEP_JUNK_FILE,
|
||||
response_dict=resp,
|
||||
headers=metadata)
|
||||
|
||||
assert 'x-object-meta-depend-ret-code' in resp_headers
|
||||
assert resp_headers['x-object-meta-depend-ret-code'] == '42'
|
||||
assert resp['status'] == 200
|
||||
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main():
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth( 'http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options = os_options,
|
||||
auth_version = '2.0' )
|
||||
|
||||
print 'Deploying ExecDep storlet and dependencies'
|
||||
url, token = c.get_auth('http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options=os_options,
|
||||
auth_version='2.0')
|
||||
|
||||
print('Deploying ExecDep storlet and dependencies')
|
||||
deploy_storlet(url, token)
|
||||
|
||||
print "Invoking ExecDep storlet"
|
||||
|
||||
print("Invoking ExecDep storlet")
|
||||
invoke_storlet(url, token)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -13,132 +13,141 @@ See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
import os
|
||||
import json
|
||||
import random
|
||||
import string
|
||||
from sys_test_params import *
|
||||
from swiftclient import client as c
|
||||
from sys_test_params import ACCOUNT
|
||||
from sys_test_params import AUTH_IP
|
||||
from sys_test_params import AUTH_PORT
|
||||
from sys_test_params import PASSWORD
|
||||
from sys_test_params import USER_NAME
|
||||
|
||||
from storlets_test_utils import put_storlet_containers, put_storlet_object, \
|
||||
progress, progress_ln, progress_msg
|
||||
from storlets_test_utils import put_storlet_object
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
# Test Constants
|
||||
HALF_PATH_TO_BUNDLE ='../StorletSamples/HalfStorlet/bin/'
|
||||
HALF_STORLET_NAME='halfstorlet-1.0.jar'
|
||||
HALF_PATH_TO_BUNDLE = '../StorletSamples/HalfStorlet/bin/'
|
||||
HALF_STORLET_NAME = 'halfstorlet-1.0.jar'
|
||||
HALF_SOURCE_FILE = 'source.txt'
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_storlet_input_object(url, token):
|
||||
resp = dict()
|
||||
metadata = {'X-Object-Meta-Testkey':'tester'}
|
||||
f = open('%s/%s' %(HALF_PATH_TO_BUNDLE, HALF_SOURCE_FILE),'r')
|
||||
c.put_object(url, token, 'myobjects', HALF_SOURCE_FILE, f,
|
||||
content_type = "application/octet-stream",
|
||||
headers = metadata,
|
||||
response_dict = resp)
|
||||
metadata = {'X-Object-Meta-Testkey': 'tester'}
|
||||
f = open('%s/%s' % (HALF_PATH_TO_BUNDLE, HALF_SOURCE_FILE), 'r')
|
||||
c.put_object(url, token, 'myobjects', HALF_SOURCE_FILE, f,
|
||||
content_type="application/octet-stream",
|
||||
headers=metadata,
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
status = resp.get('status')
|
||||
status = resp.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def deploy_storlet(url,token):
|
||||
#No need to create containers every time
|
||||
#put_storlet_containers(url, token)
|
||||
put_storlet_object( url, token,
|
||||
HALF_STORLET_NAME,
|
||||
HALF_PATH_TO_BUNDLE,
|
||||
'',
|
||||
'com.ibm.storlet.half.HalfStorlet')
|
||||
put_storlet_input_object( url, token )
|
||||
|
||||
|
||||
|
||||
def deploy_storlet(url, token):
|
||||
# No need to create containers every time
|
||||
# put_storlet_containers(url, token)
|
||||
put_storlet_object(url, token,
|
||||
HALF_STORLET_NAME,
|
||||
HALF_PATH_TO_BUNDLE,
|
||||
'',
|
||||
'com.ibm.storlet.half.HalfStorlet')
|
||||
put_storlet_input_object(url, token)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
def invoke_storlet(url, token, op, params = None, global_params = None, headers = None):
|
||||
if params != None:
|
||||
querystring=''
|
||||
|
||||
|
||||
def invoke_storlet(url, token, op, params=None, global_params=None,
|
||||
headers=None):
|
||||
if params is not None:
|
||||
querystring = ''
|
||||
for key in params:
|
||||
querystring += '%s=%s,' % (key, params[key])
|
||||
querystring = querystring[:-1]
|
||||
else:
|
||||
querystring = None
|
||||
|
||||
|
||||
metadata = {'X-Run-Storlet': HALF_STORLET_NAME}
|
||||
if headers:
|
||||
metadata.update(headers)
|
||||
|
||||
if op == 'GET':
|
||||
# Get original object
|
||||
original_headers, original_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
HALF_SOURCE_FILE,
|
||||
response_dict=dict())
|
||||
#print original_headers
|
||||
original_headers, original_content = \
|
||||
c.get_object(url, token, 'myobjects', HALF_SOURCE_FILE,
|
||||
response_dict=dict())
|
||||
# print original_headers
|
||||
file_length = int(original_headers['content-length'])
|
||||
processed_headers, returned_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
HALF_SOURCE_FILE,
|
||||
query_string = querystring,
|
||||
response_dict=dict(),
|
||||
headers=metadata,
|
||||
resp_chunk_size = file_length)
|
||||
processed_headers, returned_content = \
|
||||
c.get_object(url, token, 'myobjects', HALF_SOURCE_FILE,
|
||||
query_string=querystring, response_dict=dict(),
|
||||
headers=metadata, resp_chunk_size=file_length)
|
||||
processed_content = ''
|
||||
for chunk in returned_content:
|
||||
if chunk:
|
||||
processed_content+=chunk
|
||||
|
||||
assert(original_headers['X-Object-Meta-Testkey'.lower()] == processed_headers['X-Object-Meta-Testkey'.lower()])
|
||||
processed_content += chunk
|
||||
|
||||
assert(original_headers['X-Object-Meta-Testkey'.lower()] ==
|
||||
processed_headers['X-Object-Meta-Testkey'.lower()])
|
||||
return processed_content
|
||||
|
||||
|
||||
if op == 'PUT':
|
||||
# PUT a random file
|
||||
response = dict()
|
||||
uploaded_content = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(1024))
|
||||
random_md = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(32))
|
||||
#content_length = 1024
|
||||
uploaded_content = ''.join(random.choice(string.ascii_uppercase +
|
||||
string.digits) for _ in range(1024))
|
||||
random_md = ''.join(random.choice(string.ascii_uppercase +
|
||||
string.digits) for _ in range(32))
|
||||
# content_length = 1024
|
||||
content_length = None
|
||||
headers = {'X-Run-Storlet': HALF_STORLET_NAME,
|
||||
'X-Object-Meta-Testkey' : random_md }
|
||||
c.put_object(url, token, 'myobjects', 'half_random_source', uploaded_content,
|
||||
content_length, None, None, "application/octet-stream",
|
||||
headers, None, None, querystring, response)
|
||||
resp_headers, saved_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
'half_random_source',
|
||||
response_dict=dict())
|
||||
|
||||
if params != None and params.get('double',None) == 'true':
|
||||
assert(uploaded_content==saved_content[:1024])
|
||||
assert(uploaded_content==saved_content[1024:])
|
||||
headers = {'X-Run-Storlet': HALF_STORLET_NAME,
|
||||
'X-Object-Meta-Testkey': random_md}
|
||||
c.put_object(url, token, 'myobjects', 'half_random_source',
|
||||
uploaded_content, content_length, None, None,
|
||||
"application/octet-stream", headers, None, None,
|
||||
querystring, response)
|
||||
resp_headers, saved_content = c.get_object(url, token, 'myobjects',
|
||||
'half_random_source',
|
||||
response_dict=dict())
|
||||
|
||||
if params is not None and params.get('double', None) == 'true':
|
||||
assert(uploaded_content == saved_content[:1024])
|
||||
assert(uploaded_content == saved_content[1024:])
|
||||
else:
|
||||
assert(uploaded_content == saved_content)
|
||||
|
||||
if params != None and params.get('execute',None) != None:
|
||||
assert(resp_headers['X-Object-Meta-Execution result'.lower()] == '42')
|
||||
|
||||
assert(resp_headers['X-Object-Meta-Testkey'.lower()] == random_md)
|
||||
assert(uploaded_content == saved_content)
|
||||
|
||||
if params is not None and params.get('execute', None) is not None:
|
||||
assert(resp_headers['X-Object-Meta-Execution result'.lower()] ==
|
||||
'42')
|
||||
|
||||
assert(resp_headers['X-Object-Meta-Testkey'.lower()] == random_md)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main():
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth( 'http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options = os_options,
|
||||
auth_version = '2.0' )
|
||||
|
||||
print 'Deploying Half storlet and dependencies'
|
||||
|
||||
url, token = c.get_auth('http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options=os_options,
|
||||
auth_version='2.0')
|
||||
|
||||
print('Deploying Half storlet and dependencies')
|
||||
|
||||
deploy_storlet(url, token)
|
||||
|
||||
print "Invoking Half storlet on GET"
|
||||
assert (invoke_storlet(url, token,'GET') == 'acegikmn')
|
||||
print "Invoking Half storlet on GET with byte ranges"
|
||||
assert (invoke_storlet(url, token,'GET', headers = {'range': 'bytes=5-10'}) == 'fhj')
|
||||
|
||||
|
||||
print("Invoking Half storlet on GET")
|
||||
assert (invoke_storlet(url, token, 'GET') == 'acegikmn')
|
||||
print("Invoking Half storlet on GET with byte ranges")
|
||||
assert (invoke_storlet(url, token, 'GET',
|
||||
headers={'range': 'bytes=5-10'}) == 'fhj')
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -14,185 +14,200 @@ Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
import os
|
||||
import json
|
||||
import random
|
||||
import string
|
||||
from sys_test_params import *
|
||||
from swiftclient import client as c
|
||||
from sys_test_params import ACCOUNT
|
||||
from sys_test_params import AUTH_IP
|
||||
from sys_test_params import AUTH_PORT
|
||||
from sys_test_params import PASSWORD
|
||||
from sys_test_params import USER_NAME
|
||||
|
||||
from storlets_test_utils import put_storlet_containers, put_storlet_object, \
|
||||
progress, progress_ln, progress_msg
|
||||
from storlets_test_utils import progress
|
||||
from storlets_test_utils import progress_ln
|
||||
from storlets_test_utils import progress_msg
|
||||
from storlets_test_utils import put_storlet_object
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
# Test Constants
|
||||
IDENTITY_PATH_TO_BUNDLE ='../StorletSamples/IdentityStorlet/bin/'
|
||||
IDENTITY_STORLET_NAME='identitystorlet-1.0.jar'
|
||||
IDENTITY_STORLET_LOG_NAME='identitystorlet-1.0.log'
|
||||
IDENTITY_PATH_TO_BUNDLE = '../StorletSamples/IdentityStorlet/bin/'
|
||||
IDENTITY_STORLET_NAME = 'identitystorlet-1.0.jar'
|
||||
IDENTITY_STORLET_LOG_NAME = 'identitystorlet-1.0.log'
|
||||
IDENTITY_SOURCE_FILE = 'source.txt'
|
||||
IDENTITY_DEPS_NAMES=['get42']
|
||||
IDENTITY_DEPS_NAMES = ['get42']
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_storlet_executable_dependencies(url, token):
|
||||
resp = dict()
|
||||
for d in IDENTITY_DEPS_NAMES:
|
||||
for d in IDENTITY_DEPS_NAMES:
|
||||
metadata = {'X-Object-Meta-Storlet-Dependency-Version': '1',
|
||||
'X-Object-Meta-Storlet-Dependency-Permissions': '0755' }
|
||||
|
||||
f = open('%s/%s' %(IDENTITY_PATH_TO_BUNDLE, d),'r')
|
||||
c.put_object(url, token, 'dependency', d, f,
|
||||
content_type = "application/octet-stream",
|
||||
headers = metadata,
|
||||
response_dict = resp)
|
||||
'X-Object-Meta-Storlet-Dependency-Permissions': '0755'}
|
||||
|
||||
f = open('%s/%s' % (IDENTITY_PATH_TO_BUNDLE, d), 'r')
|
||||
c.put_object(url, token, 'dependency', d, f,
|
||||
content_type="application/octet-stream",
|
||||
headers=metadata,
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
status = resp.get('status')
|
||||
status = resp.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_storlet_input_object(url, token):
|
||||
resp = dict()
|
||||
metadata = {'X-Object-Meta-Testkey':'tester'}
|
||||
f = open('%s/%s' %(IDENTITY_PATH_TO_BUNDLE, IDENTITY_SOURCE_FILE),'r')
|
||||
c.put_object(url, token, 'myobjects', IDENTITY_SOURCE_FILE, f,
|
||||
content_type = "application/octet-stream",
|
||||
headers = metadata,
|
||||
response_dict = resp)
|
||||
metadata = {'X-Object-Meta-Testkey': 'tester'}
|
||||
f = open('%s/%s' % (IDENTITY_PATH_TO_BUNDLE, IDENTITY_SOURCE_FILE), 'r')
|
||||
c.put_object(url, token, 'myobjects', IDENTITY_SOURCE_FILE, f,
|
||||
content_type="application/octet-stream",
|
||||
headers=metadata,
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
status = resp.get('status')
|
||||
status = resp.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def deploy_storlet(url,token):
|
||||
#No need to create containers every time
|
||||
#put_storlet_containers(url, token)
|
||||
put_storlet_object( url, token,
|
||||
IDENTITY_STORLET_NAME,
|
||||
IDENTITY_PATH_TO_BUNDLE,
|
||||
','.join( str(x) for x in IDENTITY_DEPS_NAMES),
|
||||
'com.ibm.storlet.identity.IdentityStorlet')
|
||||
|
||||
|
||||
def deploy_storlet(url, token):
|
||||
# No need to create containers every time
|
||||
# put_storlet_containers(url, token)
|
||||
put_storlet_object(url, token,
|
||||
IDENTITY_STORLET_NAME,
|
||||
IDENTITY_PATH_TO_BUNDLE,
|
||||
','.join(str(x) for x in IDENTITY_DEPS_NAMES),
|
||||
'com.ibm.storlet.identity.IdentityStorlet')
|
||||
put_storlet_executable_dependencies(url, token)
|
||||
put_storlet_input_object( url, token )
|
||||
|
||||
put_storlet_input_object(url, token)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def invoke_storlet_on_1GB_file(url, token):
|
||||
GBFile = open('/tmp/1GB_file','w')
|
||||
GBFile = open('/tmp/1GB_file', 'w')
|
||||
for _ in range(128):
|
||||
progress()
|
||||
uploaded_content = ''.join('1' for _ in range(8*1024*1024))
|
||||
uploaded_content = ''.join('1' for _ in range(8 * 1024 * 1024))
|
||||
GBFile.write(uploaded_content)
|
||||
GBFile.close()
|
||||
|
||||
headers = {'X-Run-Storlet': IDENTITY_STORLET_NAME }
|
||||
GBFile = open('/tmp/1GB_file','r')
|
||||
response=dict()
|
||||
|
||||
headers = {'X-Run-Storlet': IDENTITY_STORLET_NAME}
|
||||
GBFile = open('/tmp/1GB_file', 'r')
|
||||
response = dict()
|
||||
progress()
|
||||
c.put_object(url, token, 'myobjects', '1GBFile', GBFile,
|
||||
1024*1024*1024, None, None, "application/octet-stream",
|
||||
c.put_object(url, token, 'myobjects', '1GBFile', GBFile,
|
||||
1024 * 1024 * 1024, None, None, "application/octet-stream",
|
||||
headers, None, None, None, response)
|
||||
progress()
|
||||
status = response.get('status')
|
||||
status = response.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
progress()
|
||||
os.remove('/tmp/1GB_file')
|
||||
progress_ln()
|
||||
|
||||
|
||||
def invoke_storlet(url, token, op, params = None, global_params = None):
|
||||
if params != None:
|
||||
querystring=''
|
||||
|
||||
|
||||
def invoke_storlet(url, token, op, params=None, global_params=None):
|
||||
if params is not None:
|
||||
querystring = ''
|
||||
for key in params:
|
||||
querystring += '%s=%s,' % (key, params[key])
|
||||
querystring = querystring[:-1]
|
||||
else:
|
||||
querystring = None
|
||||
|
||||
|
||||
metadata = {'X-Run-Storlet': IDENTITY_STORLET_NAME}
|
||||
if op == 'GET':
|
||||
# Get original object
|
||||
original_headers, original_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
IDENTITY_SOURCE_FILE,
|
||||
response_dict=dict())
|
||||
#print original_headers
|
||||
original_headers, original_content = \
|
||||
c.get_object(url, token, 'myobjects', IDENTITY_SOURCE_FILE,
|
||||
response_dict=dict())
|
||||
# print original_headers
|
||||
file_length = int(original_headers['content-length'])
|
||||
processed_headers, returned_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
IDENTITY_SOURCE_FILE,
|
||||
query_string = querystring,
|
||||
response_dict=dict(),
|
||||
headers=metadata,
|
||||
resp_chunk_size = file_length)
|
||||
processed_headers, returned_content = \
|
||||
c.get_object(url, token, 'myobjects', IDENTITY_SOURCE_FILE,
|
||||
query_string=querystring, response_dict=dict(),
|
||||
headers=metadata, resp_chunk_size=file_length)
|
||||
processed_content = ''
|
||||
for chunk in returned_content:
|
||||
if chunk:
|
||||
processed_content+=chunk
|
||||
|
||||
if params != None and params.get('execute',None) != None:
|
||||
assert(processed_headers['X-Object-Meta-Execution result'.lower()] == '42')
|
||||
if params != None and params.get('double',None) == 'true':
|
||||
assert(original_content==processed_content[:file_length])
|
||||
assert(original_content==processed_content[file_length:])
|
||||
processed_content += chunk
|
||||
|
||||
if params is not None and params.get('execute', None) is not None:
|
||||
assert(processed_headers['X-Object-Meta-Execution result'.lower()]
|
||||
== '42')
|
||||
if params is not None and params.get('double', None) == 'true':
|
||||
assert(original_content == processed_content[:file_length])
|
||||
assert(original_content == processed_content[file_length:])
|
||||
else:
|
||||
assert(original_content == processed_content)
|
||||
assert(original_headers['X-Object-Meta-Testkey'.lower()] == processed_headers['X-Object-Meta-Testkey'.lower()])
|
||||
|
||||
assert(original_headers['X-Object-Meta-Testkey'.lower()] ==
|
||||
processed_headers['X-Object-Meta-Testkey'.lower()])
|
||||
|
||||
if op == 'PUT':
|
||||
# PUT a random file
|
||||
response = dict()
|
||||
uploaded_content = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(1024))
|
||||
random_md = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(32))
|
||||
#content_length = 1024
|
||||
uploaded_content = ''.join(random.choice(string.ascii_uppercase +
|
||||
string.digits) for _ in range(1024))
|
||||
random_md = ''.join(random.choice(string.ascii_uppercase +
|
||||
string.digits) for _ in range(32))
|
||||
content_length = None
|
||||
headers = {'X-Run-Storlet': IDENTITY_STORLET_NAME,
|
||||
'X-Object-Meta-Testkey' : random_md }
|
||||
c.put_object(url, token, 'myobjects', 'identity_random_source', uploaded_content,
|
||||
content_length, None, None, "application/octet-stream",
|
||||
headers, None, None, querystring, response)
|
||||
resp_headers, saved_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
'identity_random_source',
|
||||
response_dict=dict())
|
||||
|
||||
if params != None and params.get('double',None) == 'true':
|
||||
assert(uploaded_content==saved_content[:1024])
|
||||
assert(uploaded_content==saved_content[1024:])
|
||||
headers = {'X-Run-Storlet': IDENTITY_STORLET_NAME,
|
||||
'X-Object-Meta-Testkey': random_md}
|
||||
c.put_object(url, token, 'myobjects', 'identity_random_source',
|
||||
uploaded_content, content_length, None, None,
|
||||
"application/octet-stream", headers, None, None,
|
||||
querystring, response)
|
||||
resp_headers, saved_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
'identity_random_source',
|
||||
response_dict=dict())
|
||||
|
||||
if params is not None and params.get('double', None) == 'true':
|
||||
assert(uploaded_content == saved_content[:1024])
|
||||
assert(uploaded_content == saved_content[1024:])
|
||||
else:
|
||||
assert(uploaded_content == saved_content)
|
||||
|
||||
if params != None and params.get('execute',None) != None:
|
||||
assert(resp_headers['X-Object-Meta-Execution result'.lower()] == '42')
|
||||
|
||||
assert(resp_headers['X-Object-Meta-Testkey'.lower()] == random_md)
|
||||
assert(uploaded_content == saved_content)
|
||||
|
||||
if params is not None and params.get('execute', None) is not None:
|
||||
assert(resp_headers['X-Object-Meta-Execution result'.lower()] ==
|
||||
'42')
|
||||
|
||||
assert(resp_headers['X-Object-Meta-Testkey'.lower()] == random_md)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main():
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth( 'http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options = os_options,
|
||||
auth_version = '2.0' )
|
||||
|
||||
print 'Deploying Identity storlet and dependencies'
|
||||
|
||||
url, token = c.get_auth('http://' + AUTH_IP + ":" +
|
||||
AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options=os_options,
|
||||
auth_version='2.0')
|
||||
|
||||
print('Deploying Identity storlet and dependencies')
|
||||
|
||||
deploy_storlet(url, token)
|
||||
|
||||
print "Invoking Identity storlet on PUT"
|
||||
invoke_storlet(url, token,'PUT')
|
||||
|
||||
print("Invoking Identity storlet on PUT")
|
||||
invoke_storlet(url, token, 'PUT')
|
||||
progress_msg("Invoking Identity storlet on 1GB file PUT")
|
||||
invoke_storlet_on_1GB_file(url, token)
|
||||
print "Invoking Identity storlet on PUT with execution of dependency"
|
||||
invoke_storlet(url, token,'PUT', {'execute' : 'true'})
|
||||
print "Invoking Identity storlet on PUT with double"
|
||||
invoke_storlet(url, token,'PUT', {'double' : 'true'})
|
||||
print "Invoking Identity storlet on GET"
|
||||
invoke_storlet(url, token,'GET')
|
||||
print "Invoking Identity storlet on GET with double"
|
||||
invoke_storlet(url, token,'GET', {'double' : 'true'})
|
||||
print "Invoking Identity storlet on GET with execution of dependency"
|
||||
invoke_storlet(url, token,'GET',{'execute' : 'true'})
|
||||
|
||||
invoke_storlet_on_1GB_file(url, token)
|
||||
print("Invoking Identity storlet on PUT with execution of dependency")
|
||||
invoke_storlet(url, token, 'PUT', {'execute': 'true'})
|
||||
print("Invoking Identity storlet on PUT with double")
|
||||
invoke_storlet(url, token, 'PUT', {'double': 'true'})
|
||||
print("Invoking Identity storlet on GET")
|
||||
invoke_storlet(url, token, 'GET')
|
||||
print("Invoking Identity storlet on GET with double")
|
||||
invoke_storlet(url, token, 'GET', {'double': 'true'})
|
||||
print("Invoking Identity storlet on GET with execution of dependency")
|
||||
invoke_storlet(url, token, 'GET', {'execute': 'true'})
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -13,71 +13,76 @@ See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
import json
|
||||
import random
|
||||
import string
|
||||
from sys_test_params import *
|
||||
from swiftclient import client as c
|
||||
from sys_test_params import ACCOUNT
|
||||
from sys_test_params import AUTH_IP
|
||||
from sys_test_params import AUTH_PORT
|
||||
from sys_test_params import PASSWORD
|
||||
from sys_test_params import USER_NAME
|
||||
|
||||
from storlets_test_utils import put_storlet_containers, put_storlet_object
|
||||
from storlets_test_utils import put_storlet_object
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
# Test Constants
|
||||
PATH_TO_BUNDLE ='../StorletSamples/TestMetadataStorlet/bin/'
|
||||
STORLET_NAME='testmetadatastorlet-1.0.jar'
|
||||
STORLET_LOG_NAME='testmetadatastorlet-1.0.log'
|
||||
PATH_TO_BUNDLE = '../StorletSamples/TestMetadataStorlet/bin/'
|
||||
STORLET_NAME = 'testmetadatastorlet-1.0.jar'
|
||||
STORLET_LOG_NAME = 'testmetadatastorlet-1.0.log'
|
||||
SOURCE_FILE = 'source.txt'
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_storlet_input_object(url, token):
|
||||
resp = dict()
|
||||
metadata = {'X-Object-Meta-key1':'1',
|
||||
'X-Object-Meta-key2':'2',
|
||||
'X-Object-Meta-key3':'3',
|
||||
'X-Object-Meta-key4':'4',
|
||||
'X-Object-Meta-key5':'5',
|
||||
'X-Object-Meta-key6':'6',
|
||||
'X-Object-Meta-key7':'7',
|
||||
'X-Object-Meta-key8':'8',
|
||||
'X-Object-Meta-key9':'9',
|
||||
'X-Object-Meta-key10':'10'}
|
||||
f = open('%s/%s' %(PATH_TO_BUNDLE, SOURCE_FILE),'r')
|
||||
c.put_object(url, token, 'myobjects', SOURCE_FILE, f,
|
||||
content_type = "application/octet-stream",
|
||||
headers = metadata,
|
||||
response_dict = resp)
|
||||
metadata = {'X-Object-Meta-key1': '1',
|
||||
'X-Object-Meta-key2': '2',
|
||||
'X-Object-Meta-key3': '3',
|
||||
'X-Object-Meta-key4': '4',
|
||||
'X-Object-Meta-key5': '5',
|
||||
'X-Object-Meta-key6': '6',
|
||||
'X-Object-Meta-key7': '7',
|
||||
'X-Object-Meta-key8': '8',
|
||||
'X-Object-Meta-key9': '9',
|
||||
'X-Object-Meta-key10': '10'}
|
||||
f = open('%s/%s' % (PATH_TO_BUNDLE, SOURCE_FILE), 'r')
|
||||
c.put_object(url, token, 'myobjects', SOURCE_FILE, f,
|
||||
content_type="application/octet-stream",
|
||||
headers=metadata,
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
status = resp.get('status')
|
||||
status = resp.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def deploy_storlet(url,token):
|
||||
#No need to create containers every time
|
||||
#put_storlet_containers(url, token)
|
||||
put_storlet_object( url, token,
|
||||
STORLET_NAME,
|
||||
PATH_TO_BUNDLE,
|
||||
'',
|
||||
'com.ibm.storlet.testmetadatastorlet.MetadataStorlet')
|
||||
put_storlet_input_object( url, token )
|
||||
|
||||
|
||||
|
||||
def deploy_storlet(url, token):
|
||||
# No need to create containers every time
|
||||
# put_storlet_containers(url, token)
|
||||
put_storlet_object(url, token,
|
||||
STORLET_NAME,
|
||||
PATH_TO_BUNDLE,
|
||||
'',
|
||||
'com.ibm.storlet.testmetadatastorlet.MetadataStorlet')
|
||||
put_storlet_input_object(url, token)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def invoke_storlet(url, token,op, params = None, global_params = None):
|
||||
if params != None:
|
||||
querystring=''
|
||||
|
||||
|
||||
def invoke_storlet(url, token, op, params=None, global_params=None):
|
||||
if params is not None:
|
||||
querystring = ''
|
||||
for key in params:
|
||||
querystring += '%s=%s,' % (key, params[key])
|
||||
querystring = querystring[:-1]
|
||||
else:
|
||||
querystring = None
|
||||
|
||||
metadata = {'X-Run-Storlet': STORLET_NAME }
|
||||
|
||||
metadata = {'X-Run-Storlet': STORLET_NAME}
|
||||
if op == 'GET':
|
||||
# Get original object
|
||||
original_headers, original_content = c.get_object(url, token,
|
||||
'myobjects',
|
||||
SOURCE_FILE,
|
||||
response_dict=dict(),
|
||||
headers=metadata)
|
||||
original_headers, original_content = \
|
||||
c.get_object(url, token, 'myobjects', SOURCE_FILE,
|
||||
response_dict=dict(), headers=metadata)
|
||||
assert(original_headers['X-Object-Meta-key1'.lower()] == '1')
|
||||
assert(original_headers['X-Object-Meta-key2'.lower()] == '2')
|
||||
assert(original_headers['X-Object-Meta-key3'.lower()] == '3')
|
||||
@ -88,24 +93,27 @@ def invoke_storlet(url, token,op, params = None, global_params = None):
|
||||
assert(original_headers['X-Object-Meta-key8'.lower()] == '8')
|
||||
assert(original_headers['X-Object-Meta-key9'.lower()] == '9')
|
||||
assert(original_headers['X-Object-Meta-key10'.lower()] == '10')
|
||||
assert(original_headers['X-Object-Meta-override_key'.lower()] == 'new_value')
|
||||
|
||||
assert(original_headers['X-Object-Meta-override_key'.lower()] ==
|
||||
'new_value')
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main():
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth( 'http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options = os_options,
|
||||
auth_version = '2.0' )
|
||||
|
||||
print 'Deploying storlet and dependencies'
|
||||
url, token = c.get_auth('http://' + AUTH_IP + ":"
|
||||
+ AUTH_PORT + '/v2.0',
|
||||
ACCOUNT + ':' + USER_NAME,
|
||||
PASSWORD,
|
||||
os_options=os_options,
|
||||
auth_version='2.0')
|
||||
|
||||
print('Deploying storlet and dependencies')
|
||||
deploy_storlet(url, token)
|
||||
|
||||
print "Invoking storlet on GET"
|
||||
invoke_storlet(url, token,'GET')
|
||||
|
||||
|
||||
print("Invoking storlet on GET")
|
||||
invoke_storlet(url, token, 'GET')
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -14,95 +14,107 @@ Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
|
||||
import sys
|
||||
import json
|
||||
|
||||
from swiftclient import client as c
|
||||
from swiftclient.client import encode_utf8, http_connection
|
||||
|
||||
|
||||
def progress():
|
||||
sys.stdout.write('.')
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
def progress_ln():
|
||||
sys.stdout.write('\n')
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
def progress_msg(msg):
|
||||
sys.stdout.write(msg)
|
||||
sys.stdout.flush()
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def enable_account_for_storlets(url,token):
|
||||
|
||||
|
||||
def enable_account_for_storlets(url, token):
|
||||
headers = dict()
|
||||
headers['X-Account-Meta-storlet-enabled'] = 'True'
|
||||
c.post_account(url, token, headers)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def put_storlet_containers(url,token):
|
||||
|
||||
|
||||
def put_storlet_containers(url, token):
|
||||
|
||||
response = dict()
|
||||
c.put_container(url, token, 'storlet', None, None, response)
|
||||
status = response.get('status')
|
||||
status = response.get('status')
|
||||
assert (status >= 200 or status < 300)
|
||||
|
||||
response = dict()
|
||||
c.put_container(url, token, 'dependency', None, None, response)
|
||||
status = response.get('status')
|
||||
status = response.get('status')
|
||||
assert (status >= 200 or status < 300)
|
||||
|
||||
response = dict()
|
||||
c.put_container(url, token, 'storletlog', None, None, response)
|
||||
status = response.get('status')
|
||||
status = response.get('status')
|
||||
assert (status >= 200 or status < 300)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_file_as_storlet_input_object(url, token, local_path, local_file):
|
||||
resp = dict()
|
||||
f = open('%s/%s' %(local_path, local_file),'r')
|
||||
c.put_object(url, token, 'myobjects', local_file, f,
|
||||
content_type = "application/octet-stream",
|
||||
response_dict = resp)
|
||||
f = open('%s/%s' % (local_path, local_file), 'r')
|
||||
c.put_object(url, token, 'myobjects', local_file, f,
|
||||
content_type="application/octet-stream",
|
||||
response_dict=resp)
|
||||
f.close()
|
||||
status = resp.get('status')
|
||||
status = resp.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
def put_storlet_object(url, token, storlet_name, storlet_path,
|
||||
|
||||
|
||||
def put_storlet_object(url, token, storlet_name, storlet_path,
|
||||
dependency, main_class):
|
||||
# Delete previous storlet
|
||||
resp = dict()
|
||||
'''
|
||||
try:
|
||||
c.delete_object(url, token, 'storlet', storlet_name, None,
|
||||
# resp = dict()
|
||||
'''try:
|
||||
|
||||
c.delete_object(url, token, 'storlet', storlet_name, None,
|
||||
None, None, None, resp)
|
||||
except Exception as e:
|
||||
if (resp.get('status')== 404):
|
||||
print 'Nothing to delete'
|
||||
'''
|
||||
metadata = {'X-Object-Meta-Storlet-Language':'Java',
|
||||
'X-Object-Meta-Storlet-Interface-Version':'1.0',
|
||||
'X-Object-Meta-Storlet-Dependency': dependency,
|
||||
'X-Object-Meta-Storlet-Object-Metadata':'no',
|
||||
print 'Nothing to delete'
|
||||
'''
|
||||
metadata = {'X-Object-Meta-Storlet-Language': 'Java',
|
||||
'X-Object-Meta-Storlet-Interface-Version': '1.0',
|
||||
'X-Object-Meta-Storlet-Dependency': dependency,
|
||||
'X-Object-Meta-Storlet-Object-Metadata': 'no',
|
||||
'X-Object-Meta-Storlet-Main': main_class}
|
||||
f = open('%s/%s' % (storlet_path, storlet_name),'r')
|
||||
f = open('%s/%s' % (storlet_path, storlet_name), 'r')
|
||||
content_length = None
|
||||
response = dict()
|
||||
c.put_object(url, token, 'storlet', storlet_name, f,
|
||||
content_length, None, None,
|
||||
"application/octet-stream", metadata,
|
||||
c.put_object(url, token, 'storlet', storlet_name, f,
|
||||
content_length, None, None,
|
||||
"application/octet-stream", metadata,
|
||||
None, None, None, response)
|
||||
f.close()
|
||||
status = response.get('status')
|
||||
status = response.get('status')
|
||||
assert (status == 200 or status == 201)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def put_dependency(url, token, local_path_to_dep, dep_name):
|
||||
metadata = {'X-Object-Meta-Storlet-Dependency-Version': '1'}
|
||||
f = open('%s/%s' %(local_path_to_dep, dep_name),'r')
|
||||
f = open('%s/%s' % (local_path_to_dep, dep_name), 'r')
|
||||
content_length = None
|
||||
response = dict()
|
||||
c.put_object(url, token, 'dependency', dep_name, f,
|
||||
content_length, None, None, "application/octet-stream",
|
||||
c.put_object(url, token, 'dependency', dep_name, f,
|
||||
content_length, None, None, "application/octet-stream",
|
||||
metadata, None, None, None, response)
|
||||
f.close()
|
||||
status = response.get('status')
|
||||
status = response.get('status')
|
||||
assert (status == 200 or status == 201)
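As an illustrative aside (not part of this commit), the helpers above would typically be combined along the following lines to deploy a storlet bundle; the local bundle path is a made-up example, and the jar and main-class names are taken from the test storlet used later in this change. put_dependency would be called in the same way for each dependency object:

    from swiftclient import client as c

    from storlets_test_utils import enable_account_for_storlets, \
        put_storlet_containers, put_storlet_object
    from sys_test_params import ACCOUNT, AUTH_IP, AUTH_PORT, PASSWORD, USER_NAME

    # Authenticate exactly as the system tests do.
    url, token = c.get_auth('http://' + AUTH_IP + ':' + AUTH_PORT + '/v2.0',
                            ACCOUNT + ':' + USER_NAME, PASSWORD,
                            os_options={'tenant_name': ACCOUNT},
                            auth_version='2.0')

    # One-time account and container preparation, then upload the storlet jar.
    enable_account_for_storlets(url, token)
    put_storlet_containers(url, token)
    put_storlet_object(url, token, 'test-10.jar',
                       '/tmp/storlet_bundle',  # hypothetical local path
                       '', 'com.ibm.storlet.test.test1')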
|
||||
|
@ -17,52 +17,50 @@ Limitations under the License.
|
||||
@author: gilv
|
||||
'''
|
||||
|
||||
import threading
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import tarfile
|
||||
import threading
|
||||
|
||||
from sys_test_params import *
|
||||
from swiftclient import client as c
|
||||
from swiftclient.client import encode_utf8, http_connection
|
||||
from sys_test_params import ACCOUNT
|
||||
from sys_test_params import AUTH_IP
|
||||
from sys_test_params import AUTH_PORT
|
||||
from sys_test_params import PASSWORD
|
||||
from sys_test_params import USER_NAME
|
||||
|
||||
from storlets_test_utils import enable_account_for_storlets, \
|
||||
put_dependency, put_storlet_containers, put_storlet_object
|
||||
from storlets_test_utils import put_storlet_object
|
||||
|
||||
TEST_STORLET_NAME = 'test-10.jar'
|
||||
|
||||
TEST_STORLET_NAME='test-10.jar'
|
||||
|
||||
PATH_TO_STORLET_GIT_MODULE = ''
|
||||
PATH_TO_STORLETS = ''
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def invokeTestStorlet(url, token, op, withlog=False):
|
||||
headers = {'X-Run-Storlet':TEST_STORLET_NAME}
|
||||
if withlog == True:
|
||||
headers = {'X-Run-Storlet': TEST_STORLET_NAME}
|
||||
if withlog is True:
|
||||
headers['X-Storlet-Generate-Log'] = 'True'
|
||||
|
||||
params = 'op={0}&param2=val2'.format(op)
|
||||
resp_dict = dict()
|
||||
try:
|
||||
resp_headers, gf = c.get_object(url, token, 'myobjects',
|
||||
'test_object', None, None, params,
|
||||
resp_headers, gf = c.get_object(url, token, 'myobjects',
|
||||
'test_object', None, None, params,
|
||||
resp_dict, headers)
|
||||
#print resp_dict
|
||||
# print(resp_dict)
|
||||
get_text = gf
|
||||
#print get_text
|
||||
# print(get_text)
|
||||
get_response_status = resp_dict.get('status')
|
||||
|
||||
if withlog == True:
|
||||
resp_headers, gf = c.get_object(url, token,
|
||||
'storletlog', 'test.log',
|
||||
|
||||
if withlog is True:
|
||||
resp_headers, gf = c.get_object(url, token,
|
||||
'storletlog', 'test.log',
|
||||
None, None, None, None, headers)
|
||||
assert resp_headers.get('status') == 200
|
||||
text = gf.read()
|
||||
gf.read()
|
||||
assert resp_headers.get('status') == 200
|
||||
#print text
|
||||
|
||||
|
||||
if op == 'print':
|
||||
assert get_response_status == 200
|
||||
assert 'op' in get_text
|
||||
@ -73,27 +71,32 @@ def invokeTestStorlet(url, token, op, withlog=False):
|
||||
except Exception as e:
|
||||
get_response_status = resp_dict.get('status')
|
||||
if op == 'crash':
|
||||
print get_response_status
|
||||
print(get_response_status)
|
||||
assert get_response_status >= 500 or get_response_status == 404
|
||||
|
||||
|
||||
if op == 'hold':
|
||||
#print get_response_status
|
||||
# print(get_response_status)
|
||||
assert get_response_status >= 500 or get_response_status == 404
|
||||
|
||||
if op == 'print':
|
||||
#print get_response_status
|
||||
# print(get_response_status)
|
||||
raise e
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
class myTestThread (threading.Thread):
|
||||
def __init__(self, url, token):
|
||||
threading.Thread.__init__(self)
|
||||
self.token = token
|
||||
self.url = url
|
||||
|
||||
def run(self):
|
||||
invokeTestStorlet(self.url, self.token, "print", False)
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def invokeTestStorletinParallel(url, token):
|
||||
mythreads = []
|
||||
|
||||
@ -103,63 +106,67 @@ def invokeTestStorletinParallel(url, token):
|
||||
|
||||
for t in mythreads:
|
||||
t.start()
|
||||
|
||||
|
||||
for t in mythreads:
|
||||
t.join()
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def testTestStorlet(url, token):
|
||||
print "Deploying test storlet"
|
||||
put_storlet_object(url,
|
||||
token,
|
||||
print("Deploying test storlet")
|
||||
put_storlet_object(url,
|
||||
token,
|
||||
TEST_STORLET_NAME,
|
||||
"%s/TestStorlet/bin/" % PATH_TO_STORLETS,
|
||||
'',
|
||||
'',
|
||||
'com.ibm.storlet.test.test1')
|
||||
|
||||
print "uploading object to execute test upon"
|
||||
print("uploading object to execute test upon")
|
||||
c.put_object(url,
|
||||
token,
|
||||
'myobjects',
|
||||
'test_object',
|
||||
'some content')
|
||||
print "Invoking test storlet to print"
|
||||
print("Invoking test storlet to print")
|
||||
invokeTestStorlet(url, token, "print", False)
|
||||
print "Invoking test storlet to crash"
|
||||
print("Invoking test storlet to crash")
|
||||
invokeTestStorlet(url, token, "crash")
|
||||
print "Invoking test storlet to hold"
|
||||
print("Invoking test storlet to hold")
|
||||
invokeTestStorlet(url, token, "hold")
|
||||
print "Invoking test storlet to print"
|
||||
print("Invoking test storlet to print")
|
||||
invokeTestStorlet(url, token, "print", False)
|
||||
print "Invoking test storlet in parallel to print"
|
||||
print("Invoking test storlet in parallel to print")
|
||||
invokeTestStorletinParallel(url, token)
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def init_path_dependant_params():
|
||||
global PATH_TO_STORLET_GIT_MODULE
|
||||
global PATH_TO_STORLET_GIT_MODULE
|
||||
global PATH_TO_STORLETS
|
||||
PATH_TO_STORLET_GIT_MODULE = '../'
|
||||
if PATH_TO_STORLET_GIT_MODULE == '':
|
||||
PATH_TO_STORLET_GIT_MODULE = os.environ['HOME'] + \
|
||||
'/workspace/Storlets'
|
||||
PATH_TO_STORLETS='%s/StorletSamples' % PATH_TO_STORLET_GIT_MODULE
|
||||
|
||||
PATH_TO_STORLET_GIT_MODULE = os.environ['HOME'] + '/workspace/Storlets'
|
||||
PATH_TO_STORLETS = '%s/StorletSamples' % PATH_TO_STORLET_GIT_MODULE
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
def main():
|
||||
init_path_dependant_params()
|
||||
|
||||
print 'Getting token'
|
||||
print('Getting token')
|
||||
os_options = {'tenant_name': ACCOUNT}
|
||||
url, token = c.get_auth("http://" + AUTH_IP + ":" + AUTH_PORT \
|
||||
+ "/v2.0", ACCOUNT + ":" + USER_NAME,
|
||||
PASSWORD, os_options = os_options,
|
||||
url, token = c.get_auth("http://" + AUTH_IP + ":" + AUTH_PORT
|
||||
+ "/v2.0", ACCOUNT + ":" + USER_NAME,
|
||||
PASSWORD, os_options=os_options,
|
||||
auth_version="2.0")
|
||||
|
||||
print 'Creating myobjects container'
|
||||
print('Creating myobjects container')
|
||||
c.put_container(url, token, 'myobjects')
|
||||
|
||||
print 'Invoking test storlet'
|
||||
|
||||
print('Invoking test storlet')
|
||||
testTestStorlet(url, token)
|
||||
|
||||
os.system('python execdep_test.py')
|
||||
@ -167,7 +174,7 @@ def main():
|
||||
os.system('python half_storlet_test.py')
|
||||
os.system('python metadata_storlet_test.py')
|
||||
os.system('python SLO_test.py')
|
||||
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
|
||||
Limitations under the License.
|
||||
-------------------------------------------------------------------------'''
|
||||
'''
|
||||
IMPORTANT: Make sure the variables AUTH_IP and KEYSTONE_IP point to the system
|
||||
IMPORTANT: Make sure the variables AUTH_IP and KEYSTONE_IP point to the system
|
||||
you are testing!!!
|
||||
'''
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
@ -18,22 +18,21 @@ Limitations under the License.
|
||||
13-Jan-2015 evgenyl Initial implementation.
|
||||
==========================================================================='''
|
||||
|
||||
import time
|
||||
import sys
|
||||
import time
|
||||
|
||||
'''------------------------------------------------------------------------'''
|
||||
|
||||
|
||||
class TextUIProgressBar:
|
||||
'''
|
||||
@summary: This class simulates Progress Bar GUI widget in UNIX terminal.
|
||||
class TextUIProgressBar(object):
|
||||
'''@summary: This class simulates Progress Bar GUI widget in UNIX terminal.
|
||||
|
||||
'''
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def __init__(self):
|
||||
'''
|
||||
@summary: CTOR, define some constant mapping
|
||||
'''
|
||||
'''@summary: CTOR, define some constant mapping'''
|
||||
self.colors = {}
|
||||
self.colors['gray'] = '30'
|
||||
self.colors['red'] = '31'
|
||||
@ -45,12 +44,12 @@ class TextUIProgressBar:
|
||||
self.colors['white'] = '37'
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
def update_progress_bar(self, complete, total, caption = '', color='' ):
|
||||
'''
|
||||
@summary: update_progress_bar
|
||||
Drawing code. The idea is
|
||||
|
||||
def update_progress_bar(self, complete, total, caption='', color=''):
|
||||
'''@summary: update_progress_bar Drawing code. The idea is
|
||||
|
||||
- jump to the beginning of the line
|
||||
- print the same amount of characters
|
||||
- print the same amount of characters
|
||||
but in a different proportion (complete/total)
|
||||
@param complete: How many steps were completed?
|
||||
@type complete: Integer, not-negative
|
||||
@ -58,30 +57,32 @@ class TextUIProgressBar:
|
||||
@type total: Integer, not-negative
|
||||
@param caption: Description to add after the bar
|
||||
@type caption: String
|
||||
@param color: Which color to use while drawing?
|
||||
@param color: Which color to use while drawing?
|
||||
Only a predefined set of colors is supported
|
||||
@type color: String
|
||||
'''
|
||||
color = self.colors.get(color, self.colors['white'])
|
||||
color = self.colors.get(color, self.colors['white'])
|
||||
color_start = '\033[01;' + color + 'm'
|
||||
color_stop = '\033[00m'
|
||||
print '\r' + color_start + u'\u2591'*complete + \
|
||||
u'\u2593'*(total-complete) + color_stop,
|
||||
color_stop = '\033[00m'
|
||||
sys.stdout.write('\r' + color_start + u'\u2591' * complete +
|
||||
u'\u2593' * (total - complete) + color_stop)
|
||||
if 0 < len(caption):
|
||||
print '{0}'.format(caption) ,
|
||||
sys.stdout.write('{0}'.format(caption))
|
||||
sys.stdout.flush()
|
||||
|
||||
'''--------------------------------------------------------------------'''
|
||||
|
||||
def test(self):
|
||||
'''@summary: test
|
||||
|
||||
Unit test. Simulate a process of 10 steps with delay of one
|
||||
second after each step.
|
||||
'''
|
||||
@summary: test
|
||||
Unit test. Simulate a process of 10 steps with
|
||||
delay of one second after each step.
|
||||
'''
|
||||
|
||||
k = self.colors.keys()
|
||||
l = len(k)
|
||||
for j in range(1, l+1):
|
||||
self.update_progress_bar(j, l, str(j), k[j-1])
|
||||
for j in range(1, l + 1):
|
||||
self.update_progress_bar(j, l, str(j), k[j - 1])
|
||||
time.sleep(1)
|
||||
|
||||
'''============================= END OF FILE =============================='''
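An illustrative usage sketch (not part of this commit) of the widget defined above; the step count and caption are arbitrary, and the module name in the import is hypothetical:

    import time

    from progress_bar import TextUIProgressBar  # hypothetical module name

    bar = TextUIProgressBar()
    total = 10
    for step in range(1, total + 1):
        # Redraw the bar in place in the requested color.
        bar.update_progress_bar(step, total, caption='step %d' % step,
                                color='red')
        time.sleep(1)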
|
||||
|
75
doc/source/conf.py
Executable file
@ -0,0 +1,75 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../..'))
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
#'sphinx.ext.intersphinx',
|
||||
'oslosphinx'
|
||||
]
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'storlets'
|
||||
copyright = u'2013, OpenStack Foundation'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
# html_theme_path = ["."]
|
||||
# html_theme = '_theme'
|
||||
# html_static_path = ['static']
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = '%sdoc' % project
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass
|
||||
# [howto/manual]).
|
||||
latex_documents = [
|
||||
('index',
|
||||
'%s.tex' % project,
|
||||
u'%s Documentation' % project,
|
||||
u'OpenStack Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
#intersphinx_mapping = {'http://docs.python.org/': None}
|
4
doc/source/contributing.rst
Normal file
@ -0,0 +1,4 @@
|
||||
============
|
||||
Contributing
|
||||
============
|
||||
.. include:: ../../CONTRIBUTING.rst
|
25
doc/source/index.rst
Normal file
@ -0,0 +1,25 @@
|
||||
.. storlets documentation master file, created by
|
||||
sphinx-quickstart on Tue Jul 9 22:26:36 2013.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to storlets's documentation!
|
||||
========================================================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
readme
|
||||
installation
|
||||
usage
|
||||
contributing
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
12
doc/source/installation.rst
Normal file
@ -0,0 +1,12 @@
|
||||
============
|
||||
Installation
|
||||
============
|
||||
|
||||
At the command line::
|
||||
|
||||
$ pip install storlets
|
||||
|
||||
Or, if you have virtualenvwrapper installed::
|
||||
|
||||
$ mkvirtualenv storlets
|
||||
$ pip install storlets
|
1
doc/source/readme.rst
Normal file
@ -0,0 +1 @@
|
||||
.. include:: ../../README.rst
|
7
doc/source/usage.rst
Normal file
@ -0,0 +1,7 @@
|
||||
========
|
||||
Usage
|
||||
========
|
||||
|
||||
To use storlets in a project::
|
||||
|
||||
import storlets
|
6
openstack-common.conf
Normal file
@ -0,0 +1,6 @@
|
||||
[DEFAULT]
|
||||
|
||||
# The list of modules to copy from oslo-incubator.git
|
||||
|
||||
# The base module to hold the copy of openstack.common
|
||||
base=storlets
|
6
requirements.txt
Normal file
@ -0,0 +1,6 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
pbr>=0.6,!=0.7,<1.0
|
||||
Babel>=1.3
|
47
setup.cfg
Normal file
@ -0,0 +1,47 @@
|
||||
[metadata]
|
||||
name = storlets
|
||||
summary = Middleware and Compute Engine for an OpenStack Swift compute framework that runs compute within a Swift cluster
|
||||
description-file =
|
||||
README.rst
|
||||
author = OpenStack
|
||||
author-email = openstack-dev@lists.openstack.org
|
||||
home-page = http://www.openstack.org/
|
||||
classifier =
|
||||
Environment :: OpenStack
|
||||
Intended Audience :: Information Technology
|
||||
Intended Audience :: System Administrators
|
||||
License :: OSI Approved :: Apache Software License
|
||||
Operating System :: POSIX :: Linux
|
||||
Programming Language :: Python
|
||||
Programming Language :: Python :: 2
|
||||
Programming Language :: Python :: 2.7
|
||||
Programming Language :: Python :: 2.6
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3.3
|
||||
Programming Language :: Python :: 3.4
|
||||
|
||||
[files]
|
||||
packages =
|
||||
storlets
|
||||
|
||||
[build_sphinx]
|
||||
source-dir = doc/source
|
||||
build-dir = doc/build
|
||||
all_files = 1
|
||||
|
||||
[upload_sphinx]
|
||||
upload-dir = doc/build/html
|
||||
|
||||
[compile_catalog]
|
||||
directory = storlets/locale
|
||||
domain = storlets
|
||||
|
||||
[update_catalog]
|
||||
domain = storlets
|
||||
output_dir = storlets/locale
|
||||
input_file = storlets/locale/storlets.pot
|
||||
|
||||
[extract_messages]
|
||||
keywords = _ gettext ngettext l_ lazy_gettext
|
||||
mapping_file = babel.cfg
|
||||
output_file = storlets/locale/storlets.pot
|
29
setup.py
Normal file
@ -0,0 +1,29 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
|
||||
import setuptools
|
||||
|
||||
# In python < 2.7.4, a lazy loading of package `pbr` will break
|
||||
# setuptools if some other modules registered functions in `atexit`.
|
||||
# solution from: http://bugs.python.org/issue15881#msg170215
|
||||
try:
|
||||
import multiprocessing # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
setuptools.setup(
|
||||
setup_requires=['pbr'],
|
||||
pbr=True)
|
18
storlets/__init__.py
Normal file
@ -0,0 +1,18 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import pbr.version
|
||||
|
||||
__version__ = pbr.version.VersionInfo(
|
||||
'storlets').version_string()
|
0
storlets/tests/__init__.py
Normal file
23
storlets/tests/base.py
Normal file
@ -0,0 +1,23 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2010-2011 OpenStack Foundation
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base
|
||||
|
||||
|
||||
class TestCase(base.BaseTestCase):
|
||||
|
||||
"""Test case base class for all unit tests."""
|
28
storlets/tests/test_storlets.py
Normal file
@ -0,0 +1,28 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
test_storlets
|
||||
----------------------------------
|
||||
|
||||
Tests for `storlets` module.
|
||||
"""
|
||||
|
||||
from storlets.tests import base
|
||||
|
||||
|
||||
class TestStorlets(base.TestCase):
|
||||
|
||||
def test_something(self):
|
||||
pass
|
15
test-requirements.txt
Normal file
@ -0,0 +1,15 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
hacking<0.11,>=0.10.0
|
||||
|
||||
coverage>=3.6
|
||||
discover
|
||||
python-subunit>=0.0.18
|
||||
sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
|
||||
oslosphinx>=2.2.0 # Apache-2.0
|
||||
oslotest>=1.2.0 # Apache-2.0
|
||||
testrepository>=0.0.18
|
||||
testscenarios>=0.4
|
||||
testtools>=0.9.36,!=1.2.0
|
35
tox.ini
Normal file
@ -0,0 +1,35 @@
|
||||
[tox]
|
||||
minversion = 1.6
|
||||
envlist = py27,pep8
|
||||
skipsdist = True
|
||||
|
||||
[testenv]
|
||||
usedevelop = True
|
||||
install_command = pip install -U {opts} {packages}
|
||||
setenv =
|
||||
VIRTUAL_ENV={envdir}
|
||||
deps = -r{toxinidir}/test-requirements.txt
|
||||
commands = python setup.py test --slowest --testr-args='{posargs}'
|
||||
|
||||
[testenv:pep8]
|
||||
commands = flake8
|
||||
|
||||
[testenv:venv]
|
||||
commands = {posargs}
|
||||
|
||||
[testenv:cover]
|
||||
commands = python setup.py test --coverage --testr-args='{posargs}'
|
||||
|
||||
[testenv:docs]
|
||||
commands = python setup.py build_sphinx
|
||||
|
||||
[testenv:debug]
|
||||
commands = oslo_debug_helper {posargs}
|
||||
|
||||
[flake8]
|
||||
# E123, E125 skipped as they are invalid PEP-8.
|
||||
|
||||
show-source = True
|
||||
ignore = E123,E125
|
||||
builtins = _
|
||||
exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build
|