Commit edd593b4 authored by Nicolas Wavrant's avatar Nicolas Wavrant

Webrunner tests and new tools for resiliency

Adds tests for the Webrunner, and updates its code to make it easier to test. These tests are still incomplete, but they have the merit of existing. I don't really like mocking function calls, but globally it's a better way than spawning a supervisord server and recreating the whole slaprunner's system tree.  

Also adds tools (= scripts) which will be useful for the monitoring in slapos : 
  * generation of RSS feed from status items, and management of them.
  * script for creating promises based on RSS feeds 

/reviewed-on nexedi/slapos.toolbox!8
parents 92680783 ad11ab67
...@@ -56,12 +56,16 @@ setup(name=name, ...@@ -56,12 +56,16 @@ setup(name=name,
'networkbench' : ['pycurl'], 'networkbench' : ['pycurl'],
'check_web_page_http_cache_hit' : ['pycurl'], # needed for check_web_page_http_cache_hit module 'check_web_page_http_cache_hit' : ['pycurl'], # needed for check_web_page_http_cache_hit module
}, },
tests_require = [
'mock',
],
zip_safe=False, # proxy depends on Flask, which has issues with zip_safe=False, # proxy depends on Flask, which has issues with
# accessing templates # accessing templates
entry_points={ entry_points={
'console_scripts': [ 'console_scripts': [
'agent = slapos.agent.agent:main', 'agent = slapos.agent.agent:main',
'check-web-page-http-cache-hit = slapos.promise.check_web_page_http_cache_hit:main', 'check-web-page-http-cache-hit = slapos.promise.check_web_page_http_cache_hit:main',
'check-feed-as-promise = slapos.checkfeedaspromise:main',
'clouddestroy = slapos.cloudmgr.destroy:main', 'clouddestroy = slapos.cloudmgr.destroy:main',
'cloudgetprivatekey = slapos.cloudmgr.getprivatekey:main', 'cloudgetprivatekey = slapos.cloudmgr.getprivatekey:main',
'cloudgetpubliciplist = slapos.cloudmgr.getpubliciplist:main', 'cloudgetpubliciplist = slapos.cloudmgr.getpubliciplist:main',
...@@ -70,6 +74,7 @@ setup(name=name, ...@@ -70,6 +74,7 @@ setup(name=name,
'cloudstart = slapos.cloudmgr.start:main', 'cloudstart = slapos.cloudmgr.start:main',
'cloudstop = slapos.cloudmgr.stop:main', 'cloudstop = slapos.cloudmgr.stop:main',
'equeue = slapos.equeue:main', 'equeue = slapos.equeue:main',
'generatefeed = slapos.generatefeed:main',
'htpasswd = slapos.htpasswd:main', 'htpasswd = slapos.htpasswd:main',
'is-local-tcp-port-opened = slapos.promise.is_local_tcp_port_opened:main', 'is-local-tcp-port-opened = slapos.promise.is_local_tcp_port_opened:main',
'is-process-older-than-dependency-set = slapos.promise.is_process_older_than_dependency_set:main', 'is-process-older-than-dependency-set = slapos.promise.is_process_older_than_dependency_set:main',
...@@ -89,6 +94,7 @@ setup(name=name, ...@@ -89,6 +94,7 @@ setup(name=name,
'pubsubnotifier = slapos.pubsub.notifier:main', 'pubsubnotifier = slapos.pubsub.notifier:main',
'pubsubserver = slapos.pubsub:main', 'pubsubserver = slapos.pubsub:main',
'qemu-qmp-client = slapos.qemuqmpclient:main', 'qemu-qmp-client = slapos.qemuqmpclient:main',
'rdiffbackup.genstatrss = slapos.resilient.rdiffBackupStat2RSS:main',
'slapos-kill = slapos.systool:kill', 'slapos-kill = slapos.systool:kill',
'slaprunnertest = slapos.runner.runnertest:main', 'slaprunnertest = slapos.runner.runnertest:main',
'slaprunnerteststandalone = slapos.runner.runnertest:runStandaloneUnitTest', 'slaprunnerteststandalone = slapos.runner.runnertest:runStandaloneUnitTest',
......
# Command line script to test a RSS feed in a promise
# Checks that a given pattern can be found (or not) in the title or the
# description of the latest feed item.
# A time buffer option can be given, to determine if the emitter process is in
# a stalled state, in the case that no OK pattern has been found
import argparse
import datetime
import feedparser
import sys
def parseArguments():
    """Build the option parser of the promise script and parse sys.argv.

    Returns an argparse.Namespace carrying the feed path, the pattern
    lists and the time buffer.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--feed-path', dest='feed_path',
                            help='Path or Url of the feed to search')
    arg_parser.add_argument('--title', dest='title', action='store_true',
                            help='Patterns should be looked for in feed item\'s title')
    arg_parser.add_argument('--description', dest='description', action='store_true',
                            help='Patterns should be looked for in feed item\'s description')
    arg_parser.add_argument('--ok-pattern', dest='ok_pattern_list', action='append',
                            default=[],
                            help='If this pattern is found, then promise succeeds')
    arg_parser.add_argument('--ko-pattern', dest='ko_pattern_list', action='append',
                            default=[],
                            help='If this pattern is found, then promise fails')
    arg_parser.add_argument('--time-buffer', dest='time_buffer', type=int,
                            default=0,
                            help='Time delta in seconds before the promise really succeeds or fails')
    return arg_parser.parse_args()
def containsPattern(string, pattern_list):
    """Return True if at least one pattern of pattern_list occurs in string."""
    return any(pattern in string for pattern in pattern_list)
def checkFeedAsPromise(feed, option):
    """Check the latest item of an RSS feed against OK/KO patterns.

    feed: path or URL of the feed (anything feedparser.parse accepts).
    option: namespace with title/description flags, ok_pattern_list,
    ko_pattern_list and time_buffer (seconds).

    Returns an empty string when the promise succeeds, otherwise a
    human-readable failure message.
    """
    feed = feedparser.parse(feed)
    if feed.bozo:
        return 'Feed malformed'
    if len(feed.entries) == 0:
        return ''

    last_item = feed.entries[-1]
    if option.title:
        candidate_string = last_item.title
    elif option.description:
        candidate_string = last_item.description
    else:
        return 'At least one in [--title|--description] should be provided'

    # published_parsed is a 9-field struct_time; only the first six fields
    # (year..second) are datetime constructor arguments — [:7] would feed
    # tm_wday in as the microsecond.  feedparser normalizes parsed dates to
    # UTC, so the age is computed against utcnow(), not the local now().
    publication_date = datetime.datetime(*last_item.published_parsed[:6])
    publication_age = datetime.datetime.utcnow() - publication_date
    time_buffer = datetime.timedelta(seconds=option.time_buffer)

    ok_pattern_found = containsPattern(candidate_string, option.ok_pattern_list)
    ko_pattern_found = containsPattern(candidate_string, option.ko_pattern_list)
    if ok_pattern_found and ko_pattern_found:
        return 'Both OK and KO patterns found: please check arguments'

    # Expectations fulfilled
    if ok_pattern_found:
        return ''
    if ko_pattern_found:
        return 'KO pattern found'

    # No pattern matched: tolerate it while the item is younger than the
    # buffer, otherwise the emitter process is considered stalled.
    if publication_age < time_buffer:
        return ''
    return 'Stalled situation'
def main():
    """Entry point: exit with the failure message (non-zero) or cleanly (0)."""
    option = parseArguments()
    message = checkFeedAsPromise(option.feed_path, option)
    # sys.exit on a non-empty string prints it and exits with status 1;
    # an empty result means success, hence the explicit 0.
    sys.exit(message if message else 0)


if __name__ == '__main__':
    main()
# Command-line script to generate a RSS feed from a bunch of well-formated
# JSON items.
# This script tries to be the more generic possible. The items used to generate
# the feed must be JSON-formatted (because of simplicity to read/write them),
# and their keys must follow the names of elements of items as described
# in the RSS2 specification :
# http://cyber.law.harvard.edu/rss/rss.html#hrelementsOfLtitemgt
import argparse
import collections
import datetime
import json
import os
import PyRSS2Gen as rss
def parseArguments():
    """Parse command line options for the feed generator.

    When --description is not supplied, the feed description defaults to
    the feed title.  Note: argparse always sets unsupplied optionals to
    None on the namespace, so the presence test must be an ``is None``
    check — ``hasattr`` is always True and would never apply the default.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--output', dest='output', type=str, required=True,
                        help='Path where to save the file')
    parser.add_argument('--status-item-path', dest='status_item_path',
                        type=str, required=True,
                        help='Path where to find feed items')
    parser.add_argument('--max-item', dest='max_item', type=int,
                        default=50, required=False,
                        help='Maximum number of items in the feed')
    parser.add_argument('--title', dest='feed_title', type=str, required=True,
                        help='Title of the feed')
    parser.add_argument('--link', dest='feed_link', type=str, required=True,
                        help='Link of the feed')
    parser.add_argument('--description', dest='feed_description',
                        type=str, required=False,
                        help='Description of the feed')
    option = parser.parse_args()
    if option.feed_description is None:
        option.feed_description = option.feed_title
    return option
def deleteFileList(file_list):
    """Remove every path in file_list, silently skipping paths that
    cannot be unlinked (already gone, permission, ...)."""
    for path in file_list:
        try:
            os.remove(path)
        except OSError:
            pass
def getRSSItemListFromItemDict(item_dict):
    """Turn {path: item-attribute-dict} into a list of rss.RSSItem.

    The 'pubDate' of every entry is converted in place from a timestamp
    to a datetime before being handed to RSSItem.
    """
    rss_item_list = []
    for attribute_dict in item_dict.values():
        attribute_dict['pubDate'] = datetime.datetime.fromtimestamp(
            attribute_dict['pubDate'])
        rss_item_list.append(rss.RSSItem(**attribute_dict))
    return rss_item_list
def generateFeed(option):
    """Read all JSON status items, build an RSS feed, return it as XML.

    Keeps at most option.max_item entries.  Items are sorted oldest-first
    by 'pubDate', so the outdated ones to drop (and to delete from disk)
    are at the *beginning* of the ordering — the previous code deleted
    keys()[max_item:], i.e. the most recent items, and relied on py2-only
    keys() slicing.
    """
    item_dict = {}  # {file_path: parsed JSON content}
    for filename in os.listdir(option.status_item_path):
        file_path = os.path.join(option.status_item_path, filename)
        with open(file_path, 'r') as fd:
            item_dict[file_path] = json.load(fd)

    sorted_item_dict = collections.OrderedDict(
        sorted(item_dict.items(), key=lambda x: x[1]['pubDate']))

    # Reduces feed if number of items exceeds max_item
    if len(sorted_item_dict) > option.max_item:
        outdated_key_list = list(sorted_item_dict)[:-option.max_item]
        for outdated_key in outdated_key_list:
            del sorted_item_dict[outdated_key]
        deleteFileList(outdated_key_list)

    # Generate feed
    feed = rss.RSS2(
        title=option.feed_title,
        link=option.feed_link,
        description=option.feed_description,
        lastBuildDate=datetime.datetime.now(),
        items=getRSSItemListFromItemDict(sorted_item_dict),
    )
    return feed.to_xml()
def main():
    """Entry point: generate the feed and write it to the output path."""
    option = parseArguments()
    feed = generateFeed(option)
    # Context manager guarantees the handle is flushed and closed, unlike
    # the previous bare open().write() which leaked the file object.
    with open(option.output, 'w') as output_file:
        output_file.write(feed)


if __name__ == "__main__":
    main()
...@@ -3,7 +3,10 @@ ...@@ -3,7 +3,10 @@
import argparse import argparse
import csv import csv
import datetime
import json
import httplib import httplib
import os
import socket import socket
import subprocess import subprocess
import sys import sys
...@@ -13,6 +16,20 @@ import urllib2 ...@@ -13,6 +16,20 @@ import urllib2
import urlparse import urlparse
import uuid import uuid
def createStatusItem(item_directory, instance_name, callback, date, link, status):
    """Write one JSON status item (feed-entry attributes) into item_directory.

    item_directory: directory collecting the items later turned into a feed.
    instance_name: name of the instance emitting the status.
    callback: path of the executed callback (only its basename is shown).
    date: unix timestamp of the run, stored as 'pubDate'.
    link: URL put in the item's 'link' element.
    status: short status word (e.g. STARTED / FINISHED / ERROR).

    The filename is derived from time.time() (not from `date`) so that two
    items for the same run timestamp do not overwrite each other.
    """
    # NOTE: the previous version declared `global app` but never used it.
    callback_short_name = os.path.basename(callback)
    content = json.dumps({
        'title': '%s-PBS %s : %s' % (instance_name, callback_short_name, status),
        'description': '%s run at %s' % (
            callback_short_name,
            datetime.datetime.fromtimestamp(date).isoformat()),
        'pubDate': date,
        'link': link,
    })
    item_path = os.path.join(item_directory, "status_%s" % time.time())
    with open(item_path, 'w') as status_file:
        status_file.write(content)
def main(): def main():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
...@@ -32,8 +49,31 @@ def main(): ...@@ -32,8 +49,31 @@ def main():
type=int, required=False, type=int, required=False,
help="Additional parameter for notification-url") help="Additional parameter for notification-url")
# Verbose mode
parser.add_argument('--instance-root-name', dest='instance_root_name',
type=str, required=False,
help="Path to config file containing info on instance")
parser.add_argument('--log-url', required=False, dest='log_url',
help="URL where the log file will be accessible")
parser.add_argument('--status-item-directory', dest='status_item_directory',
required=False, default='', type=str,
help="Directory containing PBS status to publish as feed.")
args = parser.parse_args() args = parser.parse_args()
if args.instance_root_name and args.log_url and args.status_item_directory:
# Verbose mode
saveStatus = lambda status: createStatusItem(args.status_item_directory,
args.instance_root_name,
args.executable[0],
time.time(),
args.log_url,
status)
else:
saveStatus = lambda status: None
saveStatus('STARTED')
try: try:
content = subprocess.check_output( content = subprocess.check_output(
args.executable[0], args.executable[0],
...@@ -45,7 +85,9 @@ def main(): ...@@ -45,7 +85,9 @@ def main():
args.executable[0], args.executable[0],
content.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;') content.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
)) ))
saveStatus('FINISHED')
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
saveStatus('ERROR')
content = e.output content = e.output
exit_code = e.returncode exit_code = e.returncode
content = ("FAILURE</br><p>%s Failed with returncode <em>%d</em>.</p>" content = ("FAILURE</br><p>%s Failed with returncode <em>%d</em>.</p>"
...@@ -91,23 +133,30 @@ def main(): ...@@ -91,23 +133,30 @@ def main():
notification_path += str(transaction_id) notification_path += str(transaction_id)
headers = {'Content-Type': feed.info().getheader('Content-Type')} headers = {'Content-Type': feed.info().getheader('Content-Type')}
error_message = ""
try: try:
notification = httplib.HTTPConnection(notification_url.hostname, notification = httplib.HTTPConnection(notification_url.hostname,
notification_port) notification_port)
notification.request('POST', notification_path, body, headers) notification.request('POST', notification_path, body, headers)
response = notification.getresponse() response = notification.getresponse()
if not (200 <= response.status < 300): if not (200 <= response.status < 300):
sys.stderr.write("The remote server at %s didn't send a successful reponse.\n" % notif_url) error_message = ("The remote server at %s didn't send a successful reponse.\n"
sys.stderr.write("Its response was %r\n" % response.reason) "Its response was %r\n") % (notif_url, response.reason)
some_notification_failed = True some_notification_failed = True
except socket.error as exc: except socket.error as exc:
sys.stderr.write("Connection with remote server at %s failed:\n" % notif_url) error_message = "Connection with remote server at %s failed:\n" % notif_url
sys.stderr.write(traceback.format_exc(exc)) error_message += traceback.format_exc(exc)
some_notification_failed = True some_notification_failed = True
finally:
if error_message:
sys.stderr.write(error_message)
saveStatus('ERROR ON NOTIFYING : %s' % error_message)
if some_notification_failed: if some_notification_failed:
sys.exit(1) sys.exit(1)
saveStatus('OK')
if __name__ == '__main__': if __name__ == '__main__':
main() main()
import argparse
import datetime
import os
import re
import time
import PyRSS2Gen as RSS2
from collections import OrderedDict
def parseArguments():
    """
    Parse arguments for rdiff-backup statistics Rss Generator.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--rdiff_backup_data_folder',
                            help='Path where to find rdiff-backup statistical files')
    arg_parser.add_argument('--output',
                            help='Path where to save the feed')
    arg_parser.add_argument('--feed_url',
                            help='Url of this feed file.')
    return arg_parser.parse_args()
def makeDictFromStatFile(text_content):
    """Parse one line of an rdiff-backup session_statistics file.

    Returns a dict with 'metric', 'value' and 'human_readable_value'
    keys, or None when the line matches neither expected shape.
    """
    with_readable = re.search("([a-zA-Z]*) ([0-9 :.]*) \(([a-zA-Z0-9 :.]*)\)",
                              text_content)
    if with_readable:
        metric, value, readable = with_readable.groups()
        return {'metric': metric,
                'value': value,
                'human_readable_value': readable}
    plain = re.search("([a-zA-Z]*) ([0-9]*)", text_content)
    if plain:
        metric, value = plain.groups()
        return {'metric': metric,
                'value': value,
                'human_readable_value': value}
    return None
def getRSSItemFromDict(item, option):
    """Build one RSSItem from a parsed session_statistics file.

    item: list of dicts produced by makeDictFromStatFile; entries may be
    None for lines that did not parse (e.g. blank lines) and are skipped —
    the previous code crashed on them with a TypeError.
    option: namespace providing feed_url.

    pubDate is taken from the 'EndTime' metric when present; otherwise the
    item is emitted without a publication date (instead of raising
    UnboundLocalError as before).
    """
    description = "Metric;Value;Human Readable Value\n"
    pubDate = None
    for entry in item:
        if entry is None:
            continue
        description += "%s;%s;%s\n" % (entry['metric'], entry['value'],
                                       entry['human_readable_value'])
        if entry['metric'] == "EndTime":
            pubDate = datetime.datetime.fromtimestamp(float(entry['value']))
    return RSS2.RSSItem(
        title="Rdiff-Backup Transfer Statistics",
        link=option.feed_url,
        pubDate=pubDate,
        description=description)
def genRSS(option):
    """
    Read statistics file from rdiff-backup and generate a RSS feed entry from it
    """
    data_folder = option.rdiff_backup_data_folder
    stat_file_list = sorted(
        name for name in os.listdir(data_folder)
        if name.startswith('session_statistics'))

    item_dict = OrderedDict()
    for stat_file in stat_file_list:
        with open(os.path.join(data_folder, stat_file), 'r') as stat_fd:
            item_dict[stat_file] = [makeDictFromStatFile(line.strip())
                                    for line in stat_fd.readlines()]

    feed_title = "Rdiff-Backup Statistics"
    rss_feed = RSS2.RSS2(
        title=feed_title,
        link=option.feed_url,
        description=feed_title,
        items=[getRSSItemFromDict(item_dict[name], option)
               for name in item_dict])
    return rss_feed.to_xml()
def main():
    """Entry point: generate the rdiff-backup feed and write it to --output."""
    option = parseArguments()
    rss_content = genRSS(option)
    with open(option.output, 'w') as rss_file:
        rss_file.write(rss_content)
    exit(0)
...@@ -44,7 +44,7 @@ def runProcess(config, process): ...@@ -44,7 +44,7 @@ def runProcess(config, process):
Start a process registered by supervisor Start a process registered by supervisor
""" """
server = xmlrpclib.Server(config['supervisord_server']) server = xmlrpclib.Server(config['supervisord_server'])
server.supervisor.startProcess(process) return server.supervisor.startProcess(process)
def runProcesses(config, processes): def runProcesses(config, processes):
...@@ -63,16 +63,21 @@ def stopProcess(config, process): ...@@ -63,16 +63,21 @@ def stopProcess(config, process):
""" """
if isRunning(config, process): if isRunning(config, process):
server = xmlrpclib.Server(config['supervisord_server']) server = xmlrpclib.Server(config['supervisord_server'])
server.supervisor.stopProcess(process) return server.supervisor.stopProcess(process)
def stopProcesses(config, processes): def stopProcesses(config, processes):
""" """
Stop a list of processes Stop a list of processes.
Returns True if all the processes have ended correctly.
Returns False if at least one process didn't stop correctly.
""" """
server = xmlrpclib.Server(config['supervisord_server']) server = xmlrpclib.Server(config['supervisord_server'])
return_status_list = []
for proc in processes: for proc in processes:
server.supervisor.stopProcess(proc) return_status_list.append(server.supervisor.stopProcess(proc))
return len(return_status_list) == sum(return_status_list)
def waitForProcessEnd(config, process): def waitForProcessEnd(config, process):
......
...@@ -111,12 +111,13 @@ def getUsernameList(config): ...@@ -111,12 +111,13 @@ def getUsernameList(config):
def createNewUser(config, name, passwd): def createNewUser(config, name, passwd):
htpasswdfile = os.path.join(config['etc_dir'], '.htpasswd') htpasswdfile = os.path.join(config['etc_dir'], '.htpasswd')
if os.path.exists(htpasswdfile): try:
htpasswd = HtpasswdFile(htpasswdfile) htpasswd = HtpasswdFile(htpasswdfile, new=(not os.path.exists(htpasswdfile)))
htpasswd.set_password(name, passwd) htpasswd.set_password(name, passwd)
htpasswd.save() htpasswd.save()
return True except IOError:
return False return False
return True
def getCurrentSoftwareReleaseProfile(config): def getCurrentSoftwareReleaseProfile(config):
""" """
...@@ -128,7 +129,7 @@ def getCurrentSoftwareReleaseProfile(config): ...@@ -128,7 +129,7 @@ def getCurrentSoftwareReleaseProfile(config):
return realpath( return realpath(
config, os.path.join(software_folder, config['software_profile'])) config, os.path.join(software_folder, config['software_profile']))
# XXXX No Comments # XXXX No Comments
except: except IOError:
return '' return ''
...@@ -230,7 +231,7 @@ def startProxy(config): ...@@ -230,7 +231,7 @@ def startProxy(config):
if sup_process.isRunning(config, 'slapproxy'): if sup_process.isRunning(config, 'slapproxy'):
return return
try: try:
sup_process.runProcess(config, "slapproxy") return sup_process.runProcess(config, "slapproxy")
except xmlrpclib.Fault: except xmlrpclib.Fault:
pass pass
time.sleep(4) time.sleep(4)
...@@ -238,7 +239,7 @@ def startProxy(config): ...@@ -238,7 +239,7 @@ def startProxy(config):
def stopProxy(config): def stopProxy(config):
"""Stop Slapproxy server""" """Stop Slapproxy server"""
sup_process.stopProcess(config, "slapproxy") return sup_process.stopProcess(config, "slapproxy")
def removeProxyDb(config): def removeProxyDb(config):
...@@ -277,35 +278,57 @@ def waitProcess(config, process, step): ...@@ -277,35 +278,57 @@ def waitProcess(config, process, step):
date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
slapgridResultToFile(config, step, process.returncode, date) slapgridResultToFile(config, step, process.returncode, date)
def runSlapgridWithLock(config, step, process_name, lock=False):
def runSoftwareWithLock(config, lock=False):
""" """
Use Slapgrid to compile current Software Release and wait until * process_name is the name of the process given to supervisord, which will
compilation is done run the software or the instance
* step is one of ('software', 'instance')
* lock allows to make this function asynchronous or not
""" """
if sup_process.isRunning(config, 'slapgrid-sr'): if sup_process.isRunning(config, process_name):
return 1 return 1
if not os.path.exists(config['software_root']): root_folder = config["%s_root" % step]
os.mkdir(config['software_root']) log_file = config["%s_log" % step]
stopProxy(config)
startProxy(config) if not os.path.exists(root_folder):
os.mkdir(root_folder)
# XXX Hackish and unreliable # XXX Hackish and unreliable
if os.path.exists(config['software_log']): if os.path.exists(log_file):
os.remove(config['software_log']) os.remove(log_file)
if not updateProxy(config): if not updateProxy(config):
return 1 return 1
if step == 'instance' and not requestInstance(config):
return 1
try: try:
sup_process.runProcess(config, "slapgrid-sr") sup_process.runProcess(config, process_name)
if lock: if lock:
sup_process.waitForProcessEnd(config, "slapgrid-sr") sup_process.waitForProcessEnd(config, process_name)
#Saves the current compile software for re-use #Saves the current compile software for re-use
config_SR_folder(config) if step == 'software':
return sup_process.returnCode(config, "slapgrid-sr") config_SR_folder(config)
return sup_process.returnCode(config, process_name)
except xmlrpclib.Fault: except xmlrpclib.Fault:
return 1 return 1
def runSoftwareWithLock(config, lock=False):
"""
Use Slapgrid to compile current Software Release and wait until
compilation is done
"""
return runSlapgridWithLock(config, 'software', 'slapgrid-sr', lock)
def runInstanceWithLock(config, lock=False):
"""
Use Slapgrid to deploy current Software Release and wait until
deployment is done.
"""
return runSlapgridWithLock(config, 'instance', 'slapgrid-cp', lock)
def config_SR_folder(config): def config_SR_folder(config):
"""Create a symbolik link for each folder in software folder. That allows """Create a symbolik link for each folder in software folder. That allows
the user to customize software release folder""" the user to customize software release folder"""
...@@ -374,29 +397,6 @@ def isInstanceRunning(config): ...@@ -374,29 +397,6 @@ def isInstanceRunning(config):
return sup_process.isRunning(config, 'slapgrid-cp') return sup_process.isRunning(config, 'slapgrid-cp')
def runInstanceWithLock(config, lock=False):
"""
Use Slapgrid to deploy current Software Release and wait until
deployment is done.
"""
if sup_process.isRunning(config, 'slapgrid-cp'):
return 1
startProxy(config)
# XXX Hackish and unreliable
if os.path.exists(config['instance_log']):
os.remove(config['instance_log'])
if not (updateProxy(config) and requestInstance(config)):
return 1
try:
sup_process.runProcess(config, "slapgrid-cp")
if lock:
sup_process.waitForProcessEnd(config, "slapgrid-cp")
return sup_process.returnCode(config, "slapgrid-cp")
except xmlrpclib.Fault:
return 1
def getProfilePath(projectDir, profile): def getProfilePath(projectDir, profile):
""" """
Return the path of the current Software Release `profile` Return the path of the current Software Release `profile`
...@@ -450,10 +450,9 @@ def svcStartAll(config): ...@@ -450,10 +450,9 @@ def svcStartAll(config):
except: except:
pass pass
def removeInstanceRoot(config): def removeInstanceRootDirectory(config):
"""Clean instance directory and stop all its running processes""" """Clean instance directory"""
if os.path.exists(config['instance_root']): if os.path.exists(config['instance_root']):
svcStopAll(config)
for instance_directory in os.listdir(config['instance_root']): for instance_directory in os.listdir(config['instance_root']):
instance_directory = os.path.join(config['instance_root'], instance_directory) instance_directory = os.path.join(config['instance_root'], instance_directory)
# XXX: hardcoded # XXX: hardcoded
...@@ -468,6 +467,27 @@ def removeInstanceRoot(config): ...@@ -468,6 +467,27 @@ def removeInstanceRoot(config):
os.chmod(fullPath, 0744) os.chmod(fullPath, 0744)
shutil.rmtree(instance_directory) shutil.rmtree(instance_directory)
def removeCurrentInstance(config):
if isInstanceRunning(config):
return "Instantiation in progress, cannot remove instance"
# Stop all processes
svcStopAll(config)
if stopProxy(config):
removeProxyDb(config)
else:
return "Something went wrong when trying to stop slapproxy."
# Remove Instance directory and data related to the instance
try:
removeInstanceRootDirectory(config)
param_path = os.path.join(config['etc_dir'], ".parameter.xml")
if os.path.exists(param_path):
os.remove(param_path)
except IOError:
return "The filesystem couldn't been cleaned properly"
return True
def getSvcStatus(config): def getSvcStatus(config):
"""Return all Softwares Instances process Information""" """Return all Softwares Instances process Information"""
...@@ -545,13 +565,7 @@ def configNewSR(config, projectpath): ...@@ -545,13 +565,7 @@ def configNewSR(config, projectpath):
if folder: if folder:
sup_process.stopProcess(config, 'slapgrid-cp') sup_process.stopProcess(config, 'slapgrid-cp')
sup_process.stopProcess(config, 'slapgrid-sr') sup_process.stopProcess(config, 'slapgrid-sr')
stopProxy(config) removeCurrentInstance(config)
removeProxyDb(config)
startProxy(config)
removeInstanceRoot(config)
param_path = os.path.join(config['etc_dir'], ".parameter.xml")
if os.path.exists(param_path):
os.remove(param_path)
open(os.path.join(config['etc_dir'], ".project"), 'w').write(projectpath) open(os.path.join(config['etc_dir'], ".project"), 'w').write(projectpath)
return True return True
else: else:
...@@ -573,8 +587,8 @@ def newSoftware(folder, config, session): ...@@ -573,8 +587,8 @@ def newSoftware(folder, config, session):
folderPath = realpath(config, folder, check_exist=False) folderPath = realpath(config, folder, check_exist=False)
if folderPath and not os.path.exists(folderPath): if folderPath and not os.path.exists(folderPath):
os.mkdir(folderPath) os.mkdir(folderPath)
#load software.cfg and instance.cfg from http://git.erp5.org #load software.cfg and instance.cfg from https://lab.nexedi.com
software = "http://git.erp5.org/gitweb/slapos.git/blob_plain/HEAD:/software/lamp-template/software.cfg" software = "https://lab.nexedi.com/nexedi/slapos/raw/master/software/lamp-template/software.cfg"
softwareContent = "" softwareContent = ""
try: try:
softwareContent = urllib.urlopen(software).read() softwareContent = urllib.urlopen(software).read()
...@@ -589,7 +603,7 @@ def newSoftware(folder, config, session): ...@@ -589,7 +603,7 @@ def newSoftware(folder, config, session):
removeProxyDb(config) removeProxyDb(config)
startProxy(config) startProxy(config)
#Stop runngin process and remove existing instance #Stop runngin process and remove existing instance
removeInstanceRoot(config) removeCurrentInstance(config)
session['title'] = getProjectTitle(config) session['title'] = getProjectTitle(config)
code = 1 code = 1
else: else:
...@@ -632,27 +646,47 @@ def getSoftwareReleaseName(config): ...@@ -632,27 +646,47 @@ def getSoftwareReleaseName(config):
return software.replace(' ', '_') return software.replace(' ', '_')
return "No_name" return "No_name"
def removeSoftwareRootDirectory(config, md5, folder_name):
def removeSoftwareByName(config, md5, folderName): """
"""Remove all content of the software release specified by md5 Removes all content in the filesystem of the software release specified by md5
Args: Args:
config: slaprunner configuration config: slaprunner configuration
foldername: the link name given to the software release folder_name: the link name given to the software release
md5: the md5 filename given by slapgrid to SR folder""" md5: the md5 filename given by slapgrid to SR folder
if isSoftwareRunning(config) or isInstanceRunning(config): """
raise Exception("Software installation or instantiation in progress, cannot remove")
path = os.path.join(config['software_root'], md5) path = os.path.join(config['software_root'], md5)
linkpath = os.path.join(config['software_link'], folderName) linkpath = os.path.join(config['software_link'], folder_name)
if not os.path.exists(path): if not os.path.exists(path):
raise Exception("Cannot remove software Release: No such file or directory") return (0, "Cannot remove software Release: No such file or directory")
if not os.path.exists(linkpath): if not os.path.exists(linkpath):
raise Exception("Cannot remove software Release: No such file or directory %s" % return (0, "Cannot remove software Release: No such file or directory %s" %
('software_root/' + folderName)) ('software_root/' + folder_name))
svcStopAll(config)
os.unlink(linkpath) os.unlink(linkpath)
shutil.rmtree(path) shutil.rmtree(path)
return loadSoftwareRList(config) return
def removeSoftwareByName(config, md5, folder_name):
"""
Removes a software release specified by its md5 and its name from the webrunner.
If the software release is the one of the current running instance, then
the instance should be stopped.
Args:
config: slaprunner configuration
folder_name: the link name given to the software release
md5: the md5 filename given by slapgrid to SR folder
"""
if isSoftwareRunning(config) or isInstanceRunning(config):
return (0, "Software installation or instantiation in progress, cannot remove")
if getSoftwareReleaseName(config) == folder_name:
removeCurrentInstance(config)
result = removeSoftwareRootDirectory(config, md5, folder_name)
if result is not None:
return result
return 1, loadSoftwareRList(config)
def tail(f, lines=20): def tail(f, lines=20):
...@@ -761,16 +795,11 @@ def realpath(config, path, check_exist=True): ...@@ -761,16 +795,11 @@ def realpath(config, path, check_exist=True):
""" """
split_path = path.split('/') split_path = path.split('/')
key = split_path[0] key = split_path[0]
allow_list = { virtual_path_list = ('software_root', 'instance_root', 'workspace',
'software_root': config['software_root'], 'runner_workdir', 'software_link')
'instance_root': config['instance_root'], if key not in virtual_path_list:
'workspace': config['workspace'],
'runner_workdir': config['runner_workdir'],
'software_link': config['software_link']
}
if key not in allow_list:
return '' return ''
allow_list = {path: config[path] for path in virtual_path_list if path in config}
del split_path[0] del split_path[0]
path = os.path.join(allow_list[key], *split_path) path = os.path.join(allow_list[key], *split_path)
if check_exist: if check_exist:
...@@ -811,8 +840,10 @@ def isSoftwareReleaseReady(config): ...@@ -811,8 +840,10 @@ def isSoftwareReleaseReady(config):
"""Return 1 if the Software Release has """Return 1 if the Software Release has
correctly been deployed, 0 if not, correctly been deployed, 0 if not,
and 2 if it is currently deploying""" and 2 if it is currently deploying"""
auto_deploy = config['auto_deploy'] in TRUE_VALUES slapos_software = (False if config.get('slapos-software', None) is None else True)
auto_run = config['autorun'] in TRUE_VALUES # auto_deploy and auto_run are True only if slapos_software has been declared
auto_deploy = (config['auto_deploy'] in TRUE_VALUES) and slapos_software
auto_run = (config['autorun'] in TRUE_VALUES) and slapos_software
project = os.path.join(config['etc_dir'], '.project') project = os.path.join(config['etc_dir'], '.project')
if not ( os.path.exists(project) and (auto_run or auto_deploy) ): if not ( os.path.exists(project) and (auto_run or auto_deploy) ):
return "0" return "0"
......
...@@ -25,7 +25,7 @@ from slapos.runner.utils import (checkSoftwareFolder, configNewSR, checkUserCred ...@@ -25,7 +25,7 @@ from slapos.runner.utils import (checkSoftwareFolder, configNewSR, checkUserCred
isSoftwareRunning, isSoftwareReleaseReady, isText, isSoftwareRunning, isSoftwareReleaseReady, isText,
loadSoftwareRList, md5sum, newSoftware, loadSoftwareRList, md5sum, newSoftware,
readFileFrom, readParameters, realpath, readFileFrom, readParameters, realpath,
removeInstanceRoot, removeProxyDb, removeCurrentInstance,
removeSoftwareByName, runSlapgridUntilSuccess, removeSoftwareByName, runSlapgridUntilSuccess,
saveBuildAndRunParams, saveBuildAndRunParams,
setMiniShellHistory, setMiniShellHistory,
...@@ -216,17 +216,9 @@ def supervisordStatus(): ...@@ -216,17 +216,9 @@ def supervisordStatus():
def removeInstance(): def removeInstance():
if isInstanceRunning(app.config): result = removeCurrentInstance(app.config)
flash('Instantiation in progress, cannot remove') if isinstance(result, str):
else: flash(result)
removeProxyDb(app.config)
stopProxy(app.config)
svcStopAll(app.config) # Stop All instance process
removeInstanceRoot(app.config)
param_path = os.path.join(app.config['etc_dir'], ".parameter.xml")
if os.path.exists(param_path):
os.remove(param_path)
flash('Instance removed')
return redirect(url_for('inspectInstance')) return redirect(url_for('inspectInstance'))
...@@ -375,12 +367,9 @@ def removeFile(): ...@@ -375,12 +367,9 @@ def removeFile():
def removeSoftwareDir(): def removeSoftwareDir():
try: status, message = removeSoftwareByName(app.config, request.form['md5'],
data = removeSoftwareByName(app.config, request.form['md5'], request.form['title'])
request.form['title']) return jsonify(code=status, result=message)
return jsonify(code=1, result=data)
except Exception as e:
return jsonify(code=0, result=str(e))
#read file and return content to ajax #read file and return content to ajax
......
import datetime
import feedparser
import time
import unittest
import PyRSS2Gen as RSS2
from slapos.checkfeedaspromise import checkFeedAsPromise
class Option(dict):
  """dict whose keys are also reachable as attributes.

  Simulates the options object produced by the script's argument parser so
  the tested code can be driven with an in-memory configuration.
  """
  def __init__(self, **kw):
    self.__dict__.update(kw)

  def __setitem__(self, key, value):
    # Fixed: the original signature was ``__setitem__(i, y)`` (missing
    # ``self``), so any ``option[key] = value`` raised a TypeError.
    self.__dict__[key] = value
class TestCheckFeedAsPromise(unittest.TestCase):
  """Unit tests for checkFeedAsPromise.

  checkFeedAsPromise returns an empty string when the feed looks healthy
  and a non-empty error message otherwise.
  """

  def getOptionObject(self, **kw):
    """
    Returns an object containing options as properties, to simulate a call
    to the tested script
    """
    option = {
      'title': False,
      'description': False,
      'time_buffer': 0,
      'ok_pattern_list': [],
      'ko_pattern_list': [],
    }
    option.update(kw)
    return Option(**option)

  def generateFeed(self, item_list):
    """Serialize item_list into an RSS 2.0 XML document."""
    return RSS2.RSS2(
      title="Feed Title",
      link="http://exemple.com",
      description="Feed Description",
      items=[RSS2.RSSItem(**item) for item in item_list]
    ).to_xml()

  def generateOKFeed(self, extra_item_list=None):
    """Return a feed whose last item reports success, plus optional extras."""
    item_list = [{
      'title': 'Doing Something',
      'description': 'work work work',
      'pubDate': datetime.datetime.now(),
    }, {
      'title': 'Something Finished: OK',
      'description': 'OK FINISHED DONE BASTA',
      'pubDate': datetime.datetime.now(),
    }]
    if isinstance(extra_item_list, list):
      # Fixed: was ``item_list.append(extra_item_list)``, which inserted the
      # whole list as one invalid item; generateKOFeed already used extend().
      item_list.extend(extra_item_list)
    return self.generateFeed(item_list)

  def generateKOFeed(self, extra_item_list=None):
    """Return a feed whose last item reports a failure, plus optional extras."""
    item_list = [{
      'title': 'Doing Something',
      'description': 'work work work',
      'pubDate': datetime.datetime.now(),
    }, {
      'title': 'Something Finished: Error',
      'description': 'FAILURE oops Arghh',
      'pubDate': datetime.datetime.now(),
    }]
    if isinstance(extra_item_list, list):
      item_list.extend(extra_item_list)
    return self.generateFeed(item_list)

  def test_ifOKFoundNoErrorReturned(self):
    option = self.getOptionObject()
    option.title = True
    feed = self.generateOKFeed()
    option.ok_pattern_list = ['OK']
    # assertEquals/assertNotEquals are deprecated aliases; use the
    # canonical names.
    self.assertEqual(checkFeedAsPromise(feed, option), "")
    option.title, option.description = False, True
    option.ok_pattern_list = ['DONE', 'OK']
    self.assertEqual(checkFeedAsPromise(feed, option), "")

  def test_ifKOFoundErrorReturned(self):
    option = self.getOptionObject()
    option.title = True
    feed = self.generateKOFeed()
    option.ko_pattern_list = ['Error']
    self.assertNotEqual(checkFeedAsPromise(feed, option), "")
    option.title, option.description = False, True
    option.ko_pattern_list = ['FAILURE', 'Error']
    self.assertNotEqual(checkFeedAsPromise(feed, option), "")

  def test_ifNoOKPatternFoundErrorIsRaised(self):
    option = self.getOptionObject()
    option.title = True
    feed = self.generateKOFeed()
    # If no time buffer, then not OK is always wrong
    option.ok_pattern_list = ['OK']
    self.assertNotEqual(len(checkFeedAsPromise(feed, option)), 0)
    # if time buffer, then not OK is wrong only after buffer expires
    extra_item = {
      'title': 'Something is Starting',
      'description': 'Very long operation, but should last less than 1h',
      'pubDate': datetime.datetime.now() - datetime.timedelta(seconds=3600),
    }
    feed = self.generateKOFeed([extra_item,])
    option.time_buffer = 4000
    # buffer longer than last item's age
    self.assertEqual(checkFeedAsPromise(feed, option), "")
    # shorter buffer, we want to raise an error
    option.time_buffer = 1800
    self.assertNotEqual(len(checkFeedAsPromise(feed, option)), 0)

  def test_noItemInTheFeedIsNotAnError(self):
    option = self.getOptionObject()
    option.title = True
    feed = self.generateFeed([])
    self.assertEqual(checkFeedAsPromise(feed, option), "")
# Allow running this test module directly with ``python <file>``.
if __name__ == '__main__':
  unittest.main()
import collections
import datetime
import feedparser
import json
import os
import shutil
import tempfile
import time
import unittest
from slapos.generatefeed import generateFeed
class Option(dict):
  """dict whose keys are also reachable as attributes.

  Simulates the options object produced by the script's argument parser so
  the tested code can be driven with an in-memory configuration.
  """
  def __init__(self, **kw):
    self.__dict__.update(kw)

  def __setitem__(self, key, value):
    # Fixed: the original signature was ``__setitem__(i, y)`` (missing
    # ``self``), so any ``option[key] = value`` raised a TypeError.
    self.__dict__[key] = value
class TestGenerateFeed(unittest.TestCase):
  """Unit tests for generateFeed, which builds an RSS feed from on-disk
  JSON status items and prunes the status directory to max_item entries."""

  def setUp(self):
    self.item_directory = tempfile.mkdtemp(dir='.')
    self.feed_path = os.path.join(self.item_directory, 'path')

  def tearDown(self):
    shutil.rmtree(self.item_directory)

  def getOptionObject(self, **kw):
    """
    Returns an object containing options as properties, to simulate a call
    to the tested script
    """
    option = {
      'output': self.feed_path,
      'status_item_path': self.item_directory,
      'max_item': 50,
      'feed_title': 'Feed title',
      'feed_link': 'http://example.com',
      'feed_description': 'Feed description',
    }
    option.update(kw)
    return Option(**option)

  def saveAsStatusItem(self, filename, content):
    """
    Save a JSON at filename in self.item_directory as a status item
    """
    path = os.path.join(self.item_directory, filename)
    with open(path, 'w') as status_file:
      status_file.write(json.dumps(content))

  def createItemSample(self):
    """
    Populate item_directory with a few sample items
    """
    item = [
      # Last in alphabet, first in pubDate
      ('zzz.item',
       {'description': 'description is OK too',
        'link': "http://example.com",
        'pubDate': time.mktime(datetime.datetime(2000, 1, 1).timetuple()),
        'title': 'everything is OK',
       }),
      # First in alphabet, last in pubDate (comment fixed: it was inverted)
      ('aaa.item',
       {'description': 'what went wrong ?',
        'link': "http://example.com",
        'pubDate': time.mktime(datetime.datetime(2000, 12, 31).timetuple()),
        'title': 'I guess we have an ERROR',
       }),
    ]
    for filename, content in item:
      self.saveAsStatusItem(filename, content)

  def test_feedItemsAreSortedByDate(self):
    self.createItemSample()
    option = self.getOptionObject()
    content_feed = generateFeed(option)
    feed = feedparser.parse(content_feed)
    self.assertFalse(feed.bozo)
    # Fixed: compare each entry with its predecessor so the whole list is
    # verified to be in non-decreasing date order (the previous version
    # only compared every entry with the first one, so an unsorted tail
    # could slip through).
    previous_date = None
    for item in feed.entries:
      if previous_date is not None:
        self.assertLessEqual(previous_date, item.published_parsed)
      previous_date = item.published_parsed

  def test_generateFeedCleanStatusDirectoryIfTooManyItems(self):
    option = self.getOptionObject()
    option.max_item = 10
    # Creates items more than allowed
    item_dummy_content = {
      'description': 'dummy description',
      'link': "http://example.com",
      'pubDate': time.mktime(datetime.datetime.now().timetuple()),
      'title': 'dummy title',
    }
    for i in range(15):
      filename = '%s.item' % i
      self.saveAsStatusItem(filename, item_dummy_content)
    content_feed = generateFeed(option)
    feed = feedparser.parse(content_feed)
    self.assertFalse(feed.bozo)
    # Feed entries number should be limited
    self.assertEqual(len(feed.entries), option.max_item)
    # Status item directory should have been cleaned
    self.assertEqual(len(os.listdir(self.item_directory)), option.max_item)
# Allow running this test module directly with ``python <file>``.
if __name__ == '__main__':
  unittest.main()
import csv
import feedparser
import os
import shutil
import tempfile
import unittest
from slapos.resilient.rdiffBackupStat2RSS import genRSS
class Option(dict):
  """dict whose keys are also reachable as attributes.

  Simulates the options object produced by the script's argument parser so
  the tested code can be driven with an in-memory configuration.
  """
  def __init__(self, **kw):
    self.__dict__.update(kw)

  def __setitem__(self, key, value):
    # Fixed: the original signature was ``__setitem__(i, y)`` (missing
    # ``self``), so any ``option[key] = value`` raised a TypeError.
    self.__dict__[key] = value
class TestRdiffBackupStat2RSS(unittest.TestCase):
  """Unit tests for genRSS, which converts rdiff-backup
  session_statistics files into an RSS feed (one entry per session)."""

  def setUp(self):
    self.data_directory = tempfile.mkdtemp(dir='.')
    # Simplified: os.path.join() with a single argument is a no-op.
    self.feed_path = self.data_directory

  def tearDown(self):
    shutil.rmtree(self.data_directory)

  def getOptionObject(self, **kw):
    """
    Returns an object containing options as properties, to simulate a call
    to the tested script
    """
    option = {
      'rdiff_backup_data_folder': self.data_directory,
      'output': self.feed_path,
      'feed_url': 'http://exemple.com',
    }
    option.update(kw)
    return Option(**option)

  def createSample(self):
    """
    Writes 2 statistics file in rdiff-backup format
    """
    with open(os.path.join(self.data_directory, 'session_statistics_1'), 'w') as stat_file:
      stat_file.write("""\
StartTime 1473339659.00 (Thu Sep 8 15:00:59 2016)
EndTime 1473339667.81 (Thu Sep 8 15:01:07 2016)
ElapsedTime 8.81 (8.81 seconds)
SourceFiles 2381
SourceFileSize 142096473 (136 MB)
MirrorFiles 1
MirrorFileSize 0 (0 bytes)
NewFiles 2380
NewFileSize 142096473 (136 MB)
DeletedFiles 0
DeletedFileSize 0 (0 bytes)
ChangedFiles 1
ChangedSourceSize 0 (0 bytes)
ChangedMirrorSize 0 (0 bytes)
IncrementFiles 0
IncrementFileSize 0 (0 bytes)
TotalDestinationSizeChange 142096473 (136 MB)
Errors 0""")
    with open(os.path.join(self.data_directory, 'session_statistics_2'), 'w') as stat_file:
      stat_file.write("""\
StartTime 1473340154.00 (Thu Sep 8 15:09:14 2016)
EndTime 1473340154.95 (Thu Sep 8 15:09:14 2016)
ElapsedTime 0.95 (0.95 seconds)
SourceFiles 2381
SourceFileSize 142096473 (136 MB)
MirrorFiles 2381
MirrorFileSize 142096473 (136 MB)
NewFiles 0
NewFileSize 0 (0 bytes)
DeletedFiles 0
DeletedFileSize 0 (0 bytes)
ChangedFiles 15
ChangedSourceSize 230112 (225 KB)
ChangedMirrorSize 230112 (225 KB)
IncrementFiles 15
IncrementFileSize 2122 (2.07 KB)
TotalDestinationSizeChange 2122 (2.07 KB)
Errors 0""")

  def test_generatedRSSIsCorrect(self):
    self.createSample()
    option = self.getOptionObject()
    feed_content = genRSS(option)
    feed = feedparser.parse(feed_content)
    self.assertFalse(feed.bozo)
    # Fixed: ``assertTrue(len(feed.entries), 2)`` never checked the count
    # (the second argument of assertTrue is the failure message), so the
    # assertion always passed; assertEqual actually compares.
    self.assertEqual(len(feed.entries), 2)
    # Entries must be chronological: session 1 predates session 2.
    self.assertLess(feed.entries[0].published_parsed, feed.entries[1].published_parsed)
# Allow running this test module directly with ``python <file>``.
if __name__ == '__main__':
  unittest.main()
import mock
import os
import string
import random
import supervisor
import thread
import unittest
import slapos.runner.utils as runner_utils
import sys
# Replace the supervisord helper module used by runner_utils with a
# MagicMock BEFORE any test runs, so the tests never talk to a real
# supervisord daemon and can instead assert on the recorded calls.
sys.modules['slapos.runner.utils'].sup_process = mock.MagicMock()
class TestRunnerBackEnd(unittest.TestCase):
  """Unit tests for slapos.runner.utils.

  ``runner_utils.sup_process`` is replaced at import time by a MagicMock,
  so no supervisord daemon is ever contacted; the tests only assert on the
  calls recorded by that mock.
  """

  def setUp(self):
    # Keep a handle on the module-level mock and clear its recorded calls
    # so every test starts from a clean slate.
    self.sup_process = runner_utils.sup_process
    self.sup_process.reset_mock()
    # Restore the real builtin: several tests replace runner_utils.open
    # with mock.mock_open().
    runner_utils.open = open

  def tearDown(self):
    # Remove files the tested functions may have created in the current
    # working directory.
    garbage_file_list = [
      os.path.join(*(os.getcwd(), '.htpasswd')),
      os.path.join(*(os.getcwd(), '.turn-left')),
      os.path.join(*(os.getcwd(), 'slapos-test.cfg')),
    ]
    for garbage_file in garbage_file_list:
      if os.path.exists(garbage_file):
        os.remove(garbage_file)

  def _startSupervisord(self):
    # NOTE(review): dead code — never called by any test. As written it
    # would fail at runtime: thread.start_new_thread() requires
    # (function, args) arguments, and the handle returned by open() is
    # never closed. Kept byte-identical pending removal.
    cwd = os.getcwd()
    supervisord_config_file = os.path.join(cwd, 'supervisord.conf')
    open(supervisord_config_file, 'w').write("""
""")
    supervisord = supervisor.supervisord.Supervisord('-c', supervisord_config_file)
    thread.start_new_thread()

  def test_UserCanLoginAndUpdateCredentials(self):
    """
    * Create a user with createNewUser
    * Tests user can login with checkUserCredential
    * Updates user password updateUserCredential
    * Checks user can login with new credentials
    """
    def generate_password():
      # 20 characters sampled without replacement from letters, digits
      # and punctuation.
      return "".join(random.sample( \
          string.ascii_letters + string.digits + string.punctuation, 20))
    config = {'etc_dir': os.getcwd()}
    login = "admin"
    password = generate_password()
    self.assertTrue(runner_utils.createNewUser(config, login, password))
    self.assertTrue(runner_utils.checkUserCredential(config, login, password))
    new_password = generate_password()
    self.assertNotEqual(password, new_password)
    runner_utils.updateUserCredential(config, login, new_password)
    self.assertTrue(runner_utils.checkUserCredential(config, login, new_password))

  @mock.patch('os.path.exists')
  def test_getCurrentSoftwareReleaseProfile(self, mock_path_exists):
    """
    * Mock a .project file
    * Tests that getCurrentSoftwareReleaseProfile returns an absolute path
    """
    cwd = os.getcwd()
    # If .project file doesn't exist, then getCurrentSoftwareReleaseProfile
    # returns an empty string
    config = {'etc_dir': os.path.join(cwd, 'etc'),
              'workspace': os.path.join(cwd, 'srv', 'runner'),
              'software_profile': 'software.cfg'}
    profile = runner_utils.getCurrentSoftwareReleaseProfile(config)
    self.assertEqual(profile, "")
    # If .project points to a SR that doesn't exist, returns empty string
    runner_utils.open = mock.mock_open(read_data="workspace/fake/path/")
    mock_path_exists.return_value = False
    profile = runner_utils.getCurrentSoftwareReleaseProfile(config)
    self.assertEqual(profile, "")
    # If software_profile exists, getCurrentSoftwareReleaseProfile should
    # return its absolute path
    runner_utils.open = mock.mock_open(read_data = "workspace/project/software/")
    mock_path_exists.return_value = True
    profile = runner_utils.getCurrentSoftwareReleaseProfile(config)
    self.assertEqual(profile, os.path.join(config['workspace'], 'project',
                                           'software', config['software_profile']))

  # mock.patch decorators inject their mocks AFTER the explicit arguments,
  # bottom-most decorator first — hence the parameter order below.
  @mock.patch('os.mkdir')
  @mock.patch('slapos.runner.utils.updateProxy')
  @mock.patch('slapos.runner.utils.config_SR_folder')
  def _runSlapgridWithLockMakesCorrectCallsToSupervisord(self,
                                                         run_slapgrid_function,
                                                         process_name,
                                                         mock_configSRFolder,
                                                         mock_updateProxy,
                                                         mock_mkdir):
    """
    Tests that runSoftwareWithLock and runInstanceWithLock make correct calls
    to sup_process (= supervisord)
    """
    mock_updateProxy.return_value = True
    cwd = os.getcwd()
    config = {'software_root': os.path.join(cwd, 'software'),
              'software_log': os.path.join(cwd, 'software.log'),
              'instance_root': os.path.join(cwd, 'software'),
              'instance_log': os.path.join(cwd, 'software.log')}
    # If process is already running, then does nothing
    self.sup_process.isRunning.return_value = True
    self.assertEqual(run_slapgrid_function(config), 1)
    self.assertFalse(self.sup_process.runProcess.called)
    # If the slapgrid process is not running, it should start it
    self.sup_process.isRunning.return_value = False
    # First, without Lock
    run_slapgrid_function(config)
    self.sup_process.runProcess.assert_called_once_with(config, process_name)
    self.assertFalse(self.sup_process.waitForProcessEnd.called)
    # Second, with Lock
    self.sup_process.reset_mock()
    run_slapgrid_function(config, lock=True)
    self.sup_process.runProcess.assert_called_once_with(config, process_name)
    self.sup_process.waitForProcessEnd.assert_called_once_with(config, process_name)

  def test_runSoftwareWithLockMakesCorrectCallstoSupervisord(self):
    self._runSlapgridWithLockMakesCorrectCallsToSupervisord(
        runner_utils.runSoftwareWithLock, 'slapgrid-sr')

  def test_runInstanceWithLockMakesCorrectCallstoSupervisord(self):
    self._runSlapgridWithLockMakesCorrectCallsToSupervisord(
        runner_utils.runInstanceWithLock, 'slapgrid-cp')

  @mock.patch('os.path.exists')
  @mock.patch('os.remove')
  @mock.patch('slapos.runner.utils.startProxy')
  @mock.patch('slapos.runner.utils.stopProxy')
  @mock.patch('slapos.runner.utils.removeProxyDb')
  def test_changingSRUpdatesProjectFileWithExistingPath(self,
                                                        mock_removeProxyDb,
                                                        mock_stopProxy,
                                                        mock_startProxy,
                                                        mock_remove,
                                                        mock_path_exists):
    """configNewSR writes the new SR path into .project only when it exists."""
    cwd = os.getcwd()
    config = {'etc_dir' : os.path.join(cwd, 'etc'),
              'workspace': os.path.join(cwd, 'srv', 'runner')}
    projectpath = 'workspace/project/software/'
    self.assertNotEqual(runner_utils.realpath(config, projectpath, \
                                              check_exist=False), '')
    # If projectpath doesn't exist, .project file shouldn't be written
    mock_path_exists.return_value = False
    result = runner_utils.configNewSR(config, projectpath)
    self.assertFalse(result)
    # If projectpath exist, .project file should be overwritten
    mock_path_exists.return_value = True
    runner_utils.open = mock.mock_open()
    result = runner_utils.configNewSR(config, projectpath)
    self.assertTrue(result)
    runner_utils.open.assert_has_calls([mock.call().write(projectpath)])

  @mock.patch('slapos.runner.utils.isInstanceRunning')
  @mock.patch('slapos.runner.utils.svcStopAll')
  def test_removingInstanceStopsProcessesAndCleansInstanceDirectory(self,
                                                                    mock_svcStopAll,
                                                                    mock_isInstanceRunning):
    """
    When removing the current running instances, processes should be stopped
    and directories deleted properly
    """
    cwd = os.getcwd()
    config = {'database_uri': os.path.join(cwd, 'proxy.db'),
              'etc_dir': os.path.join(cwd, 'etc'),
              'instance_root': os.path.join(cwd, 'instance'),}
    # If slapos node is running, removeCurrentInstance returns a string
    mock_isInstanceRunning.return_value = True
    self.assertTrue(isinstance(runner_utils.removeCurrentInstance(config), str))
    self.assertTrue(mock_isInstanceRunning.called)
    # If slapos is not running, process should be stopped and directories emptied
    mock_isInstanceRunning.return_value = False
    result = runner_utils.removeCurrentInstance(config)
    self.assertTrue(mock_svcStopAll.called)
    self.sup_process.stopProcess.assert_called_with(config, 'slapproxy')

  @mock.patch('os.listdir')
  @mock.patch('os.path.exists')
  @mock.patch('slapos.runner.utils.removeCurrentInstance')
  @mock.patch('slapos.runner.utils.removeSoftwareRootDirectory')
  def test_removingUsedSoftwareReleaseCleansInstancesToo(self,
                                                         mock_removeSoftwareRootDirectory,
                                                         mock_removeCurrentInstance,
                                                         mock_path_exists,
                                                         mock_listdir):
    """
    When removing the Software Release on which depends the current running
    instances, the current instances should be stopped and removed properly.
    """
    # mock_listir is needed for not raising in loadSoftwareRList or future equivalent
    mock_listdir.return_value = []
    cwd = os.getcwd()
    config = {'etc_dir': os.path.join(cwd, 'etc'),
              'software_root': os.path.join(cwd, 'software'),
              'software_link': os.path.join(cwd, 'softwareLink'),}
    self.sup_process.isRunning.return_value = False
    # First tests that if the current instance doesn't extend the Software
    # Release to delete, the instance isn't deleted
    runner_utils.open = mock.mock_open(read_data="/workspace/software/another/")
    runner_utils.removeSoftwareByName(config, '1234567890', 'my_software_name')
    self.assertFalse(mock_removeCurrentInstance.called)
    self.assertTrue(mock_removeSoftwareRootDirectory.called)
    # If the current Instance extends the Software Release, then both must
    # be removed
    mock_removeSoftwareRootDirectory.reset_mock()
    runner_utils.open = mock.mock_open(read_data="/workspace/software/my_software_name/")
    runner_utils.removeSoftwareByName(config, '1234567890', 'my_software_name')
    self.assertTrue(mock_removeCurrentInstance.called)
    self.assertTrue(mock_removeSoftwareRootDirectory.called)

  @mock.patch('slapos.runner.utils.runInstanceWithLock')
  @mock.patch('slapos.runner.utils.runSoftwareWithLock')
  def test_runSoftwareRunOnlyOnceIfSoftwareSuccessfullyCompiledOnFirstTime(self,
                                                                           mock_runSoftwareWithLock,
                                                                           mock_runInstanceWithLock):
    """A successful first compilation must not trigger retries."""
    cwd = os.getcwd()
    config = {'runner_workdir': cwd,
              'etc_dir': cwd}
    build_and_run_parameter_dict = {
      'run_instance': False,
      'run_software': True,
      'max_run_instance': 3,
      'max_run_software': 3,
    }
    runner_utils.saveBuildAndRunParams(config, build_and_run_parameter_dict)
    # First, configuration is set to only run the compilation of the software release
    # Both runSoftwareWithLock and runInstanceWithLock succeed on 1st try
    # (return value 0 means success)
    mock_runSoftwareWithLock.return_value = 0
    mock_runInstanceWithLock.return_value = 0
    runner_utils.runSlapgridUntilSuccess(config, 'software')
    self.assertEqual(mock_runSoftwareWithLock.call_count, 1)
    self.assertEqual(mock_runInstanceWithLock.call_count, 0)
    # Second, instanciation should start if compilation succeeded
    mock_runSoftwareWithLock.reset_mock()
    build_and_run_parameter_dict.update({'run_instance': True})
    runner_utils.saveBuildAndRunParams(config, build_and_run_parameter_dict)
    runner_utils.runSlapgridUntilSuccess(config, 'software')
    self.assertEqual(mock_runSoftwareWithLock.call_count, 1)
    self.assertEqual(mock_runInstanceWithLock.call_count, 1)

  @mock.patch('slapos.runner.utils.runInstanceWithLock')
  @mock.patch('slapos.runner.utils.runSoftwareWithLock')
  def test_runSoftwareDonotRestartForeverEvenIfBuildoutFileIsWrong(self,
                                                                   mock_runSoftwareWithLock,
                                                                   mock_runInstanceWithLock):
    """
    Restarting compilation or instanciation should happen a limited number of
    times to prevent useless runs due to a mistaken buildout config.
    """
    cwd = os.getcwd()
    config = {'runner_workdir': cwd,
              'etc_dir': cwd}
    build_and_run_parameter_dict = {
      'run_instance': True,
      'run_software': True,
      'max_run_instance': 3,
      'max_run_software': 3,
    }
    runner_utils.saveBuildAndRunParams(config, build_and_run_parameter_dict)
    # runSoftwareWithLock always fail and runInstanceWithLock succeeds on 1st try
    # (non-zero return value means failure)
    mock_runSoftwareWithLock.return_value = 1
    mock_runInstanceWithLock.return_value = 0
    runner_utils.runSlapgridUntilSuccess(config, 'software')
    self.assertEqual(mock_runSoftwareWithLock.call_count,
                     build_and_run_parameter_dict['max_run_software'])
    # if running software fails, then no need to try to deploy instances
    self.assertEqual(mock_runInstanceWithLock.call_count, 0)

  @unittest.skip('No scenario defined')
  def test_autoDeployWontEraseExistingInstances(self):
    raise NotImplementedError

  @unittest.skip('No scenario defined')
  def test_requestingInstanceCorrectlyPassesTypeAndParameters(self):
    raise NotImplementedError

  @unittest.skip('No scenario defined')
  def test_parametersAreCorrectlyUpdatedAndGivenToTheInstance(self):
    raise NotImplementedError
# Allow running this test module directly with ``python <file>``.
if __name__ == '__main__':
  # Seed the PRNG (used by generate_password) from OS entropy.
  random.seed()
  unittest.main()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment