Initial Commit

Change-Id: I2066c7580d8c69548982e9ffda8a50577f6a5a08
This commit is contained in:
hosingh000 2017-06-01 12:49:41 -05:00
parent f2b43d1100
commit f19f33bdec
108 changed files with 8456 additions and 0 deletions

13
.gitignore vendored Normal file
View File

@ -0,0 +1,13 @@
.idea
*.pyc
*.log
.venv
.testrepository
.project
.pydevproject
build
dist
ord.egg-info/
ranger_agent.egg-info
.settings/
.tox/

8
.testr.conf Normal file
View File

@ -0,0 +1,8 @@
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-160} \
${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./ord/tests} $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list

1
AUTHORS Normal file
View File

@ -0,0 +1 @@
hosingh000 <hosingh000@gmail.com>

3
CHANGES Normal file
View File

@ -0,0 +1,3 @@
CHANGES
=======
* Initial creation

4
HACKING.rst Normal file
View File

@ -0,0 +1,4 @@
ranger-agent Style Commandments
===============================================
Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/

8
MANIFEST.in Normal file
View File

@ -0,0 +1,8 @@
include AUTHORS
include README.rst
include ChangeLog
exclude .gitignore
exclude .gitreview
global-exclude *.pyc

7
README Normal file
View File

@ -0,0 +1,7 @@
This is the ORD project. At the highest level, it provides an API interface
for users to move OpenStack templates from CodeCloud into OS Heat.
This project is designed to show a recommended set of modules
for creating a simple API server and Engine, with a versioned API, clean separation
of modules, a standard way of structuring both unit and functional tests,
configuration files, and documentation.

12
README.rst Normal file
View File

@ -0,0 +1,12 @@
===============================
Ranger-Agent
===============================
Ranger Resource Distributor
* TODO
Features
--------
* TODO

1
babel.cfg Normal file
View File

@ -0,0 +1 @@
[python: **.py]

97
doc/Makefile Normal file
View File

@ -0,0 +1,97 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXSOURCE = source
PAPER =
BUILDDIR = build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SPHINXSOURCE)
.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
.DEFAULT_GOAL = html
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
-rm -rf $(BUILDDIR)/*
-rm -rf ranger-agent.sqlite
if [ -f .autogenerated ] ; then \
cat .autogenerated | xargs rm ; \
rm .autogenerated ; \
fi
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ranger-agent.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ranger-agent.qhc"
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
"run these through (pdf)latex."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."

View File

@ -0,0 +1,16 @@
{
"ord-notifier-response": {
"request-id": "1",
"resource-id": "1",
"resource-type": "customer",
"resource-template-version": "1",
"resource-template-name": "template.yaml",
"resource-template-type": "hot",
"operation": "create",
"ord-notifier-id": "1",
"region": "X" ,
"status": "submitted",
"error-code": "",
"error-msg": ""
}
}

View File

@ -0,0 +1,12 @@
{
"ord-notifier": {
"request-id": "1",
"resource-id": "1",
"resource-type": "customer",
"resource-template-version": "1",
"resource-template-name": "template.yaml",
"resource-template-type": "hot",
"operation": "create",
"region": "X"
}
}

View File

@ -0,0 +1,16 @@
{
"rds-listener": {
"request-id": "1",
"resource-id": "1",
"resource-type": "customer",
"resource-template-version": "1",
"resource-template-name": "template.yaml",
"resource-template-type": "hot",
"operation": "create",
"ord-notifier-id": "1",
"region": "X" ,
"status": "completed",
"error-code": "",
"error-msg": ""
}
}

View File

@ -0,0 +1,416 @@
/**
* Sphinx stylesheet -- basic theme
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
/* -- main layout ----------------------------------------------------------- */
div.clearer {
clear: both;
}
/* -- relbar ---------------------------------------------------------------- */
div.related {
width: 100%;
font-size: 90%;
}
div.related h3 {
display: none;
}
div.related ul {
margin: 0;
padding: 0 0 0 10px;
list-style: none;
}
div.related li {
display: inline;
}
div.related li.right {
float: right;
margin-right: 5px;
}
/* -- sidebar --------------------------------------------------------------- */
div.sphinxsidebarwrapper {
padding: 10px 5px 0 10px;
}
div.sphinxsidebar {
float: left;
width: 230px;
margin-left: -100%;
font-size: 90%;
}
div.sphinxsidebar ul {
list-style: none;
}
div.sphinxsidebar ul ul,
div.sphinxsidebar ul.want-points {
margin-left: 20px;
list-style: square;
}
div.sphinxsidebar ul ul {
margin-top: 0;
margin-bottom: 0;
}
div.sphinxsidebar form {
margin-top: 10px;
}
div.sphinxsidebar input {
border: 1px solid #98dbcc;
font-family: sans-serif;
font-size: 1em;
}
img {
border: 0;
}
/* -- search page ----------------------------------------------------------- */
ul.search {
margin: 10px 0 0 20px;
padding: 0;
}
ul.search li {
padding: 5px 0 5px 20px;
background-image: url(file.png);
background-repeat: no-repeat;
background-position: 0 7px;
}
ul.search li a {
font-weight: bold;
}
ul.search li div.context {
color: #888;
margin: 2px 0 0 30px;
text-align: left;
}
ul.keywordmatches li.goodmatch a {
font-weight: bold;
}
/* -- index page ------------------------------------------------------------ */
table.contentstable {
width: 90%;
}
table.contentstable p.biglink {
line-height: 150%;
}
a.biglink {
font-size: 1.3em;
}
span.linkdescr {
font-style: italic;
padding-top: 5px;
font-size: 90%;
}
/* -- general index --------------------------------------------------------- */
table.indextable td {
text-align: left;
vertical-align: top;
}
table.indextable dl, table.indextable dd {
margin-top: 0;
margin-bottom: 0;
}
table.indextable tr.pcap {
height: 10px;
}
table.indextable tr.cap {
margin-top: 10px;
background-color: #f2f2f2;
}
img.toggler {
margin-right: 3px;
margin-top: 3px;
cursor: pointer;
}
/* -- general body styles --------------------------------------------------- */
a.headerlink {
visibility: hidden;
}
h1:hover > a.headerlink,
h2:hover > a.headerlink,
h3:hover > a.headerlink,
h4:hover > a.headerlink,
h5:hover > a.headerlink,
h6:hover > a.headerlink,
dt:hover > a.headerlink {
visibility: visible;
}
div.body p.caption {
text-align: inherit;
}
div.body td {
text-align: left;
}
.field-list ul {
padding-left: 1em;
}
.first {
}
p.rubric {
margin-top: 30px;
font-weight: bold;
}
/* -- sidebars -------------------------------------------------------------- */
div.sidebar {
margin: 0 0 0.5em 1em;
border: 1px solid #ddb;
padding: 7px 7px 0 7px;
background-color: #ffe;
width: 40%;
float: right;
}
p.sidebar-title {
font-weight: bold;
}
/* -- topics ---------------------------------------------------------------- */
div.topic {
border: 1px solid #ccc;
padding: 7px 7px 0 7px;
margin: 10px 0 10px 0;
}
p.topic-title {
font-size: 1.1em;
font-weight: bold;
margin-top: 10px;
}
/* -- admonitions ----------------------------------------------------------- */
div.admonition {
margin-top: 10px;
margin-bottom: 10px;
padding: 7px;
}
div.admonition dt {
font-weight: bold;
}
div.admonition dl {
margin-bottom: 0;
}
p.admonition-title {
margin: 0px 10px 5px 0px;
font-weight: bold;
}
div.body p.centered {
text-align: center;
margin-top: 25px;
}
/* -- tables ---------------------------------------------------------------- */
table.docutils {
border: 0;
border-collapse: collapse;
}
table.docutils td, table.docutils th {
padding: 1px 8px 1px 0;
border-top: 0;
border-left: 0;
border-right: 0;
border-bottom: 1px solid #aaa;
}
table.field-list td, table.field-list th {
border: 0 !important;
}
table.footnote td, table.footnote th {
border: 0 !important;
}
th {
text-align: left;
padding-right: 5px;
}
/* -- other body styles ----------------------------------------------------- */
dl {
margin-bottom: 15px;
}
dd p {
margin-top: 0px;
}
dd ul, dd table {
margin-bottom: 10px;
}
dd {
margin-top: 3px;
margin-bottom: 10px;
margin-left: 30px;
}
dt:target, .highlight {
background-color: #fbe54e;
}
dl.glossary dt {
font-weight: bold;
font-size: 1.1em;
}
.field-list ul {
margin: 0;
padding-left: 1em;
}
.field-list p {
margin: 0;
}
.refcount {
color: #060;
}
.optional {
font-size: 1.3em;
}
.versionmodified {
font-style: italic;
}
.system-message {
background-color: #fda;
padding: 5px;
border: 3px solid red;
}
.footnote:target {
background-color: #ffa
}
.line-block {
display: block;
margin-top: 1em;
margin-bottom: 1em;
}
.line-block .line-block {
margin-top: 0;
margin-bottom: 0;
margin-left: 1.5em;
}
/* -- code displays --------------------------------------------------------- */
pre {
overflow: auto;
}
td.linenos pre {
padding: 5px 0px;
border: 0;
background-color: transparent;
color: #aaa;
}
table.highlighttable {
margin-left: 0.5em;
}
table.highlighttable td {
padding: 0 0.5em 0 0.5em;
}
tt.descname {
background-color: transparent;
font-weight: bold;
font-size: 1.2em;
}
tt.descclassname {
background-color: transparent;
}
tt.xref, a tt {
background-color: transparent;
font-weight: bold;
}
h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
background-color: transparent;
}
/* -- math display ---------------------------------------------------------- */
img.math {
vertical-align: middle;
}
div.body div.math p {
text-align: center;
}
span.eqno {
float: right;
}
/* -- printout stylesheet --------------------------------------------------- */
@media print {
div.document,
div.documentwrapper,
div.bodywrapper {
margin: 0 !important;
width: 100%;
}
div.sphinxsidebar,
div.related,
div.footer,
#top-link {
display: none;
}
}

View File

@ -0,0 +1,230 @@
/**
* Sphinx stylesheet -- default theme
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
@import url("basic.css");
/* -- page layout ----------------------------------------------------------- */
body {
font-family: sans-serif;
font-size: 100%;
background-color: #11303d;
color: #000;
margin: 0;
padding: 0;
}
div.document {
background-color: #1c4e63;
}
div.documentwrapper {
float: left;
width: 100%;
}
div.bodywrapper {
margin: 0 0 0 230px;
}
div.body {
background-color: #ffffff;
color: #000000;
padding: 0 20px 30px 20px;
}
div.footer {
color: #ffffff;
width: 100%;
padding: 9px 0 9px 0;
text-align: center;
font-size: 75%;
}
div.footer a {
color: #ffffff;
text-decoration: underline;
}
div.related {
background-color: #133f52;
line-height: 30px;
color: #ffffff;
}
div.related a {
color: #ffffff;
}
div.sphinxsidebar {
}
div.sphinxsidebar h3 {
font-family: 'Trebuchet MS', sans-serif;
color: #ffffff;
font-size: 1.4em;
font-weight: normal;
margin: 0;
padding: 0;
}
div.sphinxsidebar h3 a {
color: #ffffff;
}
div.sphinxsidebar h4 {
font-family: 'Trebuchet MS', sans-serif;
color: #ffffff;
font-size: 1.3em;
font-weight: normal;
margin: 5px 0 0 0;
padding: 0;
}
div.sphinxsidebar p {
color: #ffffff;
}
div.sphinxsidebar p.topless {
margin: 5px 10px 10px 10px;
}
div.sphinxsidebar ul {
margin: 10px;
padding: 0;
color: #ffffff;
}
div.sphinxsidebar a {
color: #98dbcc;
}
div.sphinxsidebar input {
border: 1px solid #98dbcc;
font-family: sans-serif;
font-size: 1em;
}
/* -- body styles ----------------------------------------------------------- */
a {
color: #355f7c;
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
div.body p, div.body dd, div.body li {
text-align: left;
line-height: 130%;
}
div.body h1,
div.body h2,
div.body h3,
div.body h4,
div.body h5,
div.body h6 {
font-family: 'Trebuchet MS', sans-serif;
background-color: #f2f2f2;
font-weight: normal;
color: #20435c;
border-bottom: 1px solid #ccc;
margin: 20px -20px 10px -20px;
padding: 3px 0 3px 10px;
}
div.body h1 { margin-top: 0; font-size: 200%; }
div.body h2 { font-size: 160%; }
div.body h3 { font-size: 140%; }
div.body h4 { font-size: 120%; }
div.body h5 { font-size: 110%; }
div.body h6 { font-size: 100%; }
a.headerlink {
color: #c60f0f;
font-size: 0.8em;
padding: 0 4px 0 4px;
text-decoration: none;
}
a.headerlink:hover {
background-color: #c60f0f;
color: white;
}
div.body p, div.body dd, div.body li {
text-align: left;
line-height: 130%;
}
div.admonition p.admonition-title + p {
display: inline;
}
div.admonition p {
margin-bottom: 5px;
}
div.admonition pre {
margin-bottom: 5px;
}
div.admonition ul, div.admonition ol {
margin-bottom: 5px;
}
div.note {
background-color: #eee;
border: 1px solid #ccc;
}
div.seealso {
background-color: #ffc;
border: 1px solid #ff6;
}
div.topic {
background-color: #eee;
}
div.warning {
background-color: #ffe4e4;
border: 1px solid #f66;
}
p.admonition-title {
display: inline;
}
p.admonition-title:after {
content: ":";
}
pre {
padding: 5px;
background-color: #eeffcc;
color: #333333;
line-height: 120%;
border: 1px solid #ac9;
border-left: none;
border-right: none;
}
tt {
background-color: #ecf0f3;
padding: 0 1px 0 1px;
font-size: 0.95em;
}
.warning tt {
background: #efc2c2;
}
.note tt {
background: #d6d6d6;
}

View File

@ -0,0 +1,154 @@
(function($) {
$.fn.tweet = function(o){
var s = {
username: ["seaofclouds"], // [string] required, unless you want to display our tweets. :) it can be an array, just do ["username1","username2","etc"]
list: null, //[string] optional name of list belonging to username
avatar_size: null, // [integer] height and width of avatar if displayed (48px max)
count: 3, // [integer] how many tweets to display?
intro_text: null, // [string] do you want text BEFORE your tweets?
outro_text: null, // [string] do you want text AFTER your tweets?
join_text: null, // [string] optional text in between date and tweet, try setting to "auto"
auto_join_text_default: "i said,", // [string] auto text for non verb: "i said" bullocks
auto_join_text_ed: "i", // [string] auto text for past tense: "i" surfed
auto_join_text_ing: "i am", // [string] auto tense for present tense: "i was" surfing
auto_join_text_reply: "i replied to", // [string] auto tense for replies: "i replied to" @someone "with"
auto_join_text_url: "i was looking at", // [string] auto tense for urls: "i was looking at" http:...
loading_text: null, // [string] optional loading text, displayed while tweets load
query: null // [string] optional search query
};
if(o) $.extend(s, o);
$.fn.extend({
linkUrl: function() {
var returning = [];
var regexp = /((ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?)/gi;
this.each(function() {
returning.push(this.replace(regexp,"<a href=\"$1\">$1</a>"));
});
return $(returning);
},
linkUser: function() {
var returning = [];
var regexp = /[\@]+([A-Za-z0-9-_]+)/gi;
this.each(function() {
returning.push(this.replace(regexp,"<a href=\"http://twitter.com/$1\">@$1</a>"));
});
return $(returning);
},
linkHash: function() {
var returning = [];
var regexp = / [\#]+([A-Za-z0-9-_]+)/gi;
this.each(function() {
returning.push(this.replace(regexp, ' <a href="http://search.twitter.com/search?q=&tag=$1&lang=all&from='+s.username.join("%2BOR%2B")+'">#$1</a>'));
});
return $(returning);
},
capAwesome: function() {
var returning = [];
this.each(function() {
returning.push(this.replace(/\b(awesome)\b/gi, '<span class="awesome">$1</span>'));
});
return $(returning);
},
capEpic: function() {
var returning = [];
this.each(function() {
returning.push(this.replace(/\b(epic)\b/gi, '<span class="epic">$1</span>'));
});
return $(returning);
},
makeHeart: function() {
var returning = [];
this.each(function() {
returning.push(this.replace(/(&lt;)+[3]/gi, "<tt class='heart'>&#x2665;</tt>"));
});
return $(returning);
}
});
function relative_time(time_value) {
var parsed_date = Date.parse(time_value);
var relative_to = (arguments.length > 1) ? arguments[1] : new Date();
var delta = parseInt((relative_to.getTime() - parsed_date) / 1000);
var pluralize = function (singular, n) {
return '' + n + ' ' + singular + (n == 1 ? '' : 's');
};
if(delta < 60) {
return 'less than a minute ago';
} else if(delta < (45*60)) {
return 'about ' + pluralize("minute", parseInt(delta / 60)) + ' ago';
} else if(delta < (24*60*60)) {
return 'about ' + pluralize("hour", parseInt(delta / 3600)) + ' ago';
} else {
return 'about ' + pluralize("day", parseInt(delta / 86400)) + ' ago';
}
}
function build_url() {
var proto = ('https:' == document.location.protocol ? 'https:' : 'http:');
if (s.list) {
return proto+"//api.twitter.com/1/"+s.username[0]+"/lists/"+s.list+"/statuses.json?per_page="+s.count+"&callback=?";
} else if (s.query == null && s.username.length == 1) {
return proto+'//twitter.com/status/user_timeline/'+s.username[0]+'.json?count='+s.count+'&callback=?';
} else {
var query = (s.query || 'from:'+s.username.join('%20OR%20from:'));
return proto+'//search.twitter.com/search.json?&q='+query+'&rpp='+s.count+'&callback=?';
}
}
return this.each(function(){
var list = $('<ul class="tweet_list">').appendTo(this);
var intro = '<p class="tweet_intro">'+s.intro_text+'</p>';
var outro = '<p class="tweet_outro">'+s.outro_text+'</p>';
var loading = $('<p class="loading">'+s.loading_text+'</p>');
if(typeof(s.username) == "string"){
s.username = [s.username];
}
if (s.loading_text) $(this).append(loading);
$.getJSON(build_url(), function(data){
if (s.loading_text) loading.remove();
if (s.intro_text) list.before(intro);
$.each((data.results || data), function(i,item){
// auto join text based on verb tense and content
if (s.join_text == "auto") {
if (item.text.match(/^(@([A-Za-z0-9-_]+)) .*/i)) {
var join_text = s.auto_join_text_reply;
} else if (item.text.match(/(^\w+:\/\/[A-Za-z0-9-_]+\.[A-Za-z0-9-_:%&\?\/.=]+) .*/i)) {
var join_text = s.auto_join_text_url;
} else if (item.text.match(/^((\w+ed)|just) .*/im)) {
var join_text = s.auto_join_text_ed;
} else if (item.text.match(/^(\w*ing) .*/i)) {
var join_text = s.auto_join_text_ing;
} else {
var join_text = s.auto_join_text_default;
}
} else {
var join_text = s.join_text;
};
var from_user = item.from_user || item.user.screen_name;
var profile_image_url = item.profile_image_url || item.user.profile_image_url;
var join_template = '<span class="tweet_join"> '+join_text+' </span>';
var join = ((s.join_text) ? join_template : ' ');
var avatar_template = '<a class="tweet_avatar" href="http://twitter.com/'+from_user+'"><img src="'+profile_image_url+'" height="'+s.avatar_size+'" width="'+s.avatar_size+'" alt="'+from_user+'\'s avatar" title="'+from_user+'\'s avatar" border="0"/></a>';
var avatar = (s.avatar_size ? avatar_template : '');
var date = '<a href="http://twitter.com/'+from_user+'/statuses/'+item.id+'" title="view tweet on twitter">'+relative_time(item.created_at)+'</a>';
var text = '<span class="tweet_text">' +$([item.text]).linkUrl().linkUser().linkHash().makeHeart().capAwesome().capEpic()[0]+ '</span>';
// until we create a template option, arrange the items below to alter a tweet's display.
list.append('<li>' + avatar + date + join + text + '</li>');
list.children('li:first').addClass('tweet_first');
list.children('li:odd').addClass('tweet_even');
list.children('li:even').addClass('tweet_odd');
});
if (s.outro_text) list.after(outro);
});
});
};
})(jQuery);

View File

@ -0,0 +1,65 @@
ul.todo_list {
list-style-type: none;
margin: 0;
padding: 0;
}
ul.todo_list li {
display: block;
margin: 0;
padding: 7px 0;
border-top: 1px solid #eee;
}
ul.todo_list li p {
display: inline;
}
ul.todo_list li p.link {
font-weight: bold;
}
ul.todo_list li p.details {
font-style: italic;
}
ul.todo_list li {
}
div.admonition {
border: 1px solid #8F1000;
}
div.admonition p.admonition-title {
background-color: #8F1000;
border-bottom: 1px solid #8E8E8E;
}
a {
color: #CF2F19;
}
div.related ul li a {
color: #CF2F19;
}
div.sphinxsidebar h4 {
background-color:#8E8E8E;
border:1px solid #255E6E;
color:white;
font-size:1em;
margin:1em 0 0.5em;
padding:0.1em 0 0.1em 0.5em;
}
em {
font-style: normal;
}
table.docutils {
font-size: 11px;
}
a tt {
color:#CF2F19;
}

View File

View File

@ -0,0 +1,86 @@
{% extends "sphinxdoc/layout.html" %}
{% set css_files = css_files + ['_static/tweaks.css'] %}
{% set script_files = script_files + ['_static/jquery.tweet.js'] %}
{% block extrahead %}
<script type='text/javascript'>
$(document).ready(function(){
$("#twitter_feed").tweet({
username: "openstack",
query: "from:openstack",
avatar_size: 32,
count: 10,
loading_text: "loading tweets..."
});
});
</script>
{% endblock %}
{%- macro sidebar() %}
{%- if not embedded %}{% if not theme_nosidebar|tobool %}
<div class="sphinxsidebar">
<div class="sphinxsidebarwrapper">
{%- block sidebarlogo %}
{%- if logo %}
<p class="logo"><a href="{{ pathto(master_doc) }}">
<img class="logo" src="{{ pathto('_static/' + logo, 1) }}" alt="Logo"/>
</a></p>
{%- endif %}
{%- endblock %}
{%- block sidebartoc %}
{%- if display_toc %}
<h3><a href="{{ pathto(master_doc) }}">{{ _('Table Of Contents') }}</a></h3>
{{ toc }}
{%- endif %}
{%- endblock %}
{%- block sidebarrel %}
{%- if prev %}
<h4>{{ _('Previous topic') }}</h4>
<p class="topless"><a href="{{ prev.link|e }}"
title="{{ _('previous chapter') }}">{{ prev.title }}</a></p>
{%- endif %}
{%- if next %}
<h4>{{ _('Next topic') }}</h4>
<p class="topless"><a href="{{ next.link|e }}"
title="{{ _('next chapter') }}">{{ next.title }}</a></p>
{%- endif %}
{%- endblock %}
{%- block sidebarsourcelink %}
{%- if show_source and has_source and sourcename %}
<h3>{{ _('This Page') }}</h3>
<ul class="this-page-menu">
<li><a href="{{ pathto('_sources/' + sourcename, true)|e }}"
rel="nofollow">{{ _('Show Source') }}</a></li>
</ul>
{%- endif %}
{%- endblock %}
{%- if customsidebar %}
{% include customsidebar %}
{%- endif %}
{%- block sidebarsearch %}
{%- if pagename != "search" %}
<div id="searchbox" style="display: none">
<h3>{{ _('Quick search') }}</h3>
<form class="search" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" size="18" />
<input type="submit" value="{{ _('Go') }}" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
<p class="searchtip" style="font-size: 90%">
{{ _('Enter search terms or a module, class or function name.') }}
</p>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
{%- endif %}
{%- if pagename == "index" %}
<h3>{{ _('Twitter Feed') }}</h3>
<div id="twitter_feed" class='twitter_feed'></div>
{%- endif %}
{%- endblock %}
</div>
</div>
{%- endif %}{% endif %}
{%- endmacro %}

View File

@ -0,0 +1,5 @@
[theme]
inherit = sphinxdoc
stylesheet = sphinxdoc.css
pygments_style = friendly

247
doc/source/conf.py Normal file
View File

@ -0,0 +1,247 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Ranger-Agent documentation build configuration file, created by
# sphinx-quickstart on Tue May 18 13:50:15 2010.
#
# This file is execfile()'d with the current directory set to it's containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.pngmath',
'sphinx.ext.graphviz',
'sphinx.ext.todo']
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = []
if os.getenv('HUDSON_PUBLISH_DOCS'):
templates_path = ['_ga', '_templates']
else:
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ranger-agent'
copyright = u'2011-present, OpenStack, LLC.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
from ord import __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['ranger-agent.']
# -- Options for man page output --------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [
('man/rangeragentapi', 'ranger-agent-api', u'Ranger Agent API Server',
[u'OpenStack'], 1),
('man/rangeragentengine', 'ranger-agent-registry', u'Ranger Agent Engine Server',
[u'OpenStack'], 1)
]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme_path = ["."]
html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['_theme']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
# NOTE: 'project' is defined earlier in this file; the htmlhelp output is
# named "<project>doc".
htmlhelp_basename = '%sdoc' % project
# -- Options for LaTeX output ------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
# documentclass [howto/manual]).
latex_documents = [
    ('index', 'ranger-agent.tex', u'Ranger agent Documentation',
     u'Ranger Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
# Maps a short project name to (base URL of its published docs, inventory).
intersphinx_mapping = {'python': ('http://docs.python.org/', None),
                       'dashboard': ('http://dashboard.openstack.org', None),
                       'glance': ('http://glance.openstack.org', None),
                       'keystone': ('http://keystone.openstack.org', None),
                       'ranger-agent': ('http://ranger-agent.openstack.org', None),
                       'swift': ('http://swift.openstack.org', None)}

53
doc/source/index.rst Normal file
View File

@ -0,0 +1,53 @@
..
Copyright 2011 OpenStack, LLC.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
Welcome to Skeleton's documentation!
====================================
Description of Skeleton project
Concepts
========
.. toctree::
:maxdepth: 1
Using Skeleton
==============
.. toctree::
:maxdepth: 1
gettingstarted
installing
Developer Docs
==============
.. toctree::
:maxdepth: 1
Outstanding Documentation Tasks
===============================
.. todolist::
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

18
etc/api-paste.ini Normal file
View File

@ -0,0 +1,18 @@
# ORD API WSGI Pipeline
# Define the filters that make up the pipeline for processing WSGI requests
# Note: This pipeline is PasteDeploy's term rather than ORD's pipeline
# used for processing samples
# Note: the authtoken filter is defined below but not included in the pipeline;
# add it to the pipeline if you want to use keystone authentication
[pipeline:main]
pipeline = request_id api-server
[app:api-server]
paste.app_factory = ord.api.app:app_factory
[filter:authtoken]
paste.filter_factory = keystonemiddleware.auth_token:filter_factory
[filter:request_id]
paste.filter_factory = oslo.middleware:RequestId.factory

46
etc/ord.conf Executable file
View File

@ -0,0 +1,46 @@
[DEFAULT]
api_workers = 1
debug = True
verbose = True
pecan_debug = True
region = local
repo_connection_timeout = 120
#resource_creation_timeout_min will be used by other resources
resource_creation_timeout_min = 1200
#resource_creation_timeout_max will be used by image
resource_creation_timeout_max = 14400
#Log files location
log_dir = /var/log/ranger-agent
local_repo = ranger_repo
resource_status_check_wait = 15
[api]
# Address to bind the API server to
host = 0.0.0.0
# Port the bind the API server to
port = 9010
[ord_credentials]
auth_url = http://127.0.0.1:5000/v2.0
user_name = admin
password = ranger-agent
tenant_name = admin
openstack_client_http_timeout = 360
https_insecure = False
[database]
db_connection = mysql://root:stack@localhost/ord?charset=utf8
[oslo_messaging_rabbit]
rabbit_userid = stackrabbit
rabbit_password = stack
rabbit_hosts = localhost
rabbit_port = 5672
[orm]
#This will assume the required ssh-keys are all already populated
orm_template_repo_url = git@127.0.0.1:/home/repo/ranger_repo.git
#This is fake service call will be replaced with rds url
rds_listener_endpoint = http://135.16.225.210:8777/v1/rds/status
repo_pull_check_wait = 2
retry_limits = 5

19
ord/__init__.py Normal file
View File

@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
# Package version string, resolved by pbr from the installed
# 'ranger-agent' distribution metadata.
__version__ = pbr.version.VersionInfo(
    'ranger-agent').version_string()

33
ord/api/__init__.py Normal file
View File

@ -0,0 +1,33 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
# Register options for the service
# Listener address/port for the ord-api WSGI server (group [api]).
OPTS = [
    cfg.IntOpt('port',
               default=9010,
               help='The port for the ORD API server.',
               ),
    cfg.StrOpt('host',
               default='0.0.0.0',
               help='The listen IP for the ORD API server.',
               ),
]
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api',
                         title='Options for the ord-api service')
CONF.register_group(opt_group)
# Options become available as CONF.api.host / CONF.api.port.
CONF.register_opts(OPTS, opt_group)

150
ord/api/app.py Normal file
View File

@ -0,0 +1,150 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from ord.api import config as api_config
from ord.api.controllers.v1 import api
from ord.api import middleware
from ord.i18n import _
from ord.i18n import _LW
from ord.openstack.common import log
from ord import service
import os
from oslo_config import cfg
import oslo_messaging as messaging
from paste import deploy
import pecan
from werkzeug import serving
LOG = log.getLogger(__name__)
CONF = cfg.CONF
# [DEFAULT] options: paste config location and worker-process count.
OPTS = [
    cfg.StrOpt('api_paste_config',
               default="api-paste.ini",
               help="Configuration file for WSGI definition of API."
               ),
    cfg.IntOpt('api_workers', default=1,
               help='Number of workers for ORD API server.'),
]
# [api] options: pecan debug middleware toggle (see setup_app below).
API_OPTS = [
    cfg.BoolOpt('pecan_debug',
                default=False,
                help='Toggle Pecan Debug Middleware.'),
]
CONF.register_opts(OPTS)
CONF.register_opts(API_OPTS, group='api')
def get_pecan_config():
    """Load the Pecan configuration from the ord.api.config module file."""
    # __file__ may point at the compiled .pyc; map it back to the source.
    config_path = api_config.__file__.replace('.pyc', '.py')
    return pecan.configuration.conf_from_file(config_path)
def setup_app(pecan_config=None, extra_hooks=None):
    """Build the Pecan WSGI application and start the listener RPC server.

    :param pecan_config: pre-loaded pecan configuration; loaded from
        ord.api.config when not supplied
    :param extra_hooks: optional list of pecan hooks to install
    :returns: the pecan WSGI application object
    """
    app_hooks = []
    if extra_hooks:
        app_hooks.extend(extra_hooks)
    if not pecan_config:
        pecan_config = get_pecan_config()
    pecan.configuration.set_config(dict(pecan_config), overwrite=True)
    # NOTE(sileht): pecan debug won't work in multi-process environment
    pecan_debug = CONF.api.pecan_debug
    if service.get_workers('api') != 1 and pecan_debug:
        pecan_debug = False
        LOG.warning(_LW('pecan_debug cannot be enabled, if workers is > 1, '
                        'the value is overrided with False'))
    app = pecan.make_app(
        pecan_config.app.root,
        debug=pecan_debug,
        force_canonical=getattr(pecan_config.app, 'force_canonical', True),
        hooks=app_hooks,
        # Wrap responses so error bodies are parsable JSON/XML.
        wrap_app=middleware.ParsableErrorMiddleware,
        guess_content_type_from_ext=False
    )
    # NOTE(review): an oslo.messaging RPC server is started here as a side
    # effect of building the WSGI app; it consumes 'ord-listener-q' messages
    # via ListenerQueueHandler for the lifetime of the process.
    transport = messaging.get_transport(cfg.CONF)
    target = messaging.Target(topic='ord-listener-q', server=cfg.CONF.host)
    endpoints = [api.ListenerQueueHandler()]
    server = messaging.get_rpc_server(transport,
                                      target,
                                      endpoints,
                                      executor='eventlet')
    server.start()
    LOG.info("*********************************started")
    return app
class VersionSelectorApplication(object):
    """WSGI application that dispatches every request to the v1 app.

    Kept as a thin wrapper so additional API versions can be mounted here
    later without changing the paste pipeline.
    """

    def __init__(self):
        # NOTE: removed an unused inner 'not_found' WSGI callable that was
        # defined here but never referenced.
        pc = get_pecan_config()
        self.v1 = setup_app(pecan_config=pc)

    def __call__(self, environ, start_response):
        # WSGI entry point: only the v1 application currently exists.
        return self.v1(environ, start_response)
def load_app():
    """Locate the api-paste configuration file and build the WSGI app.

    :raises: cfg.ConfigFilesNotFoundError when the paste config cannot
        be found
    """
    paste_path = cfg.CONF.api_paste_config
    if os.path.isabs(paste_path):
        # Absolute path: use it only if the file actually exists.
        found = paste_path if os.path.exists(paste_path) else None
    else:
        # Relative path: search the standard oslo.config locations.
        found = CONF.find_file(paste_path)
    if not found:
        raise cfg.ConfigFilesNotFoundError([cfg.CONF.api_paste_config])
    LOG.info("Full WSGI config used: %s" % found)
    return deploy.loadapp("config:" + found)
def build_server():
    """Load the WSGI app and serve it until interrupted.

    Logs the effective configuration, then runs werkzeug's simple server
    with the configured number of worker processes.
    """
    app = load_app()
    # Create the WSGI server and start it
    host, port = cfg.CONF.api.host, cfg.CONF.api.port
    LOG.info(_('Starting server in PID %s') % os.getpid())
    LOG.info(_("Configuration:"))
    cfg.CONF.log_opt_values(LOG, logging.INFO)
    if host == '0.0.0.0':
        LOG.info(_(
            'serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%(vport)s')
            % ({'sport': port, 'vport': port}))
    else:
        LOG.info(_("serving on http://%(host)s:%(port)s") % (
            {'host': host, 'port': port}))
    workers = service.get_workers('api')
    # Blocks here serving requests; 'processes' forks worker processes.
    serving.run_simple(cfg.CONF.api.host, cfg.CONF.api.port,
                       app, processes=workers)
def app_factory(global_config, **local_conf):
    # PasteDeploy application factory entry point (see etc/api-paste.ini);
    # arguments are required by the paste contract but unused here.
    return VersionSelectorApplication()

32
ord/api/config.py Normal file
View File

@ -0,0 +1,32 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Server Specific Configurations
# NOTE(review): host/port here appear to duplicate the [api] oslo.config
# options registered in ord.api -- confirm which one is authoritative.
server = {
    'port': '9010',
    'host': '0.0.0.0'
}
# Pecan Application Configurations
app = {
    'root': 'ord.api.controllers.root.RootController',
    'modules': ['ord.api'],
}
# Custom Configurations must be in Python dictionary format::
#
# foo = {'bar':'baz'}
#
# All configurations are accessible at::
# pecan.conf

View File

View File

@ -0,0 +1,55 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from ord.api.controllers.v1 import root as v1
MEDIA_TYPE_JSON = 'application/vnd.openstack.telemetry-%s+json'
MEDIA_TYPE_XML = 'application/vnd.openstack.telemetry-%s+xml'
class RootController(object):
    """Root API controller: exposes version discovery at '/'."""

    # Subcontroller mounted at /v1.
    v1 = v1.V1Controller()

    @pecan.expose('json')
    def index(self):
        """Return the list of available API version descriptors."""
        base_url = pecan.request.host_url
        available = [{'tag': 'v1', 'date': '2013-02-13T00:00:00Z', }]
        collected = [version_descriptor(base_url, v['tag'], v['date'])
                     for v in available]
        versions = {'versions': {'values': collected}}
        return versions
def version_descriptor(base_url, version, released_on):
    """Assemble the discovery document for one API version.

    :param base_url: service base URL
    :param version: version tag, e.g. 'v1'
    :param released_on: ISO-8601 release timestamp
    """
    self_link = {'href': version_url(base_url, version), 'rel': 'self', }
    docs_link = {'href': 'http://docs.openstack.org/',
                 'rel': 'describedby', 'type': 'text/html', }
    media_types = [
        {'base': 'application/json', 'type': MEDIA_TYPE_JSON % version, },
        {'base': 'application/xml', 'type': MEDIA_TYPE_XML % version, },
    ]
    return {
        'id': version,
        'links': [self_link, docs_link],
        'media-types': media_types,
        'status': 'stable',
        'updated': released_on,
    }
def version_url(base_url, version_number):
    """Return the URL of a specific API version under *base_url*."""
    return '{0}/{1}'.format(base_url, version_number)

View File

View File

@ -0,0 +1,284 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ord.client import rpcapi
from ord.common import exceptions as exc
from ord.common import utils
from ord.i18n import _
from ord.common.utils import ErrorCode
from ord.db import api as db_api
from ord.openstack.common import log
from oslo_config import cfg
from pecan import expose
from urllib2 import HTTPError
import datetime
import json
import oslo_messaging as messaging
import urllib2
import uuid
import webob
import webob.exc
LOG = log.getLogger(__name__)
CONF = cfg.CONF
# [orm] group: where to POST template status updates.
orm_opts = [
    cfg.StrOpt('rds_listener_endpoint',
               help='Endpoint to rds_listener ')
]
# [DEFAULT] group: the region this agent serves (validated on POST).
opts = [
    cfg.StrOpt('region',
               help='Region')
]
CONF.register_opts(opts)
opt_group = cfg.OptGroup(name='orm',
                         title='Options for the orm service')
CONF.register_group(opt_group)
CONF.register_opts(orm_opts, opt_group)
class ListenerQueueHandler(object):
    """RPC endpoint that forwards engine status payloads to the RDS
    listener service and records the delivery outcome in the database.
    """

    def __init__(self):
        super(ListenerQueueHandler, self).__init__()

    def invoke_listener_rpc(self, ctxt, payload):
        """Forward a JSON status *payload* from the engine to the RDS
        listener endpoint, then persist the delivery status.

        :param ctxt: RPC request context (logged only)
        :param payload: JSON-encoded document containing an optional
            'rds-listener' section with error info and the notifier id
        """
        LOG.debug(" ----- message from Engine -----")
        LOG.debug(" Payload: %s \n ctxt: %s " % (str(payload), str(ctxt)))
        LOG.debug(" -------------------------------")
        listener_response_body = {}
        # BUGFIX: pre-initialize every name referenced by the ``finally``
        # block and the HTTP-error path.  Previously a payload that failed
        # json.loads() (or lacked the 'rds-listener' key) caused a
        # NameError on status_code/args/template_status_id/error_msg while
        # reporting the original problem.
        args = {}
        template_status_id = None
        status_code = None
        status = utils.STATUS_RDS_ERROR
        error_msg = None
        error_code = None
        try:
            listener_response_body = json.loads(payload)
            LOG.debug(" Payload to RDS Listener %s " % listener_response_body)
            headers = {'Content-type': 'application/json'}
            rds_url = CONF.orm.rds_listener_endpoint
            req = urllib2.Request(rds_url,
                                  json.dumps(listener_response_body),
                                  headers)
            if 'rds-listener' in listener_response_body:
                error_code = (listener_response_body['rds-listener']
                              ['error-code'])
                error_msg = (listener_response_body['rds-listener']
                             ['error-msg'])
                args['error_msg'] = error_msg
                args['error_code'] = error_code
                template_status_id = (listener_response_body['rds-listener']
                                      ['ord-notifier-id'])
            try:
                LOG.info('Connecting to RDS at %s' % rds_url)
                resp = urllib2.urlopen(req)
                status = utils.STATUS_RDS_SUCCESS
                if resp is not None:
                    status_code = resp.getcode()
            except (HTTPError, Exception) as e:
                status = utils.STATUS_RDS_ERROR
                # HTTPError exposes the response code via getcode().
                if "getcode" in dir(e):
                    status_code = e.getcode()
                raise exc.RDSListenerHTTPError(error_msg=error_msg,
                                               error_code=error_code)
        except ValueError as e:
            status = utils.STATUS_RDS_ERROR
            LOG.error('Error while parsing input payload %r', e)
        except Exception as ex:
            status = utils.STATUS_RDS_ERROR
            LOG.error('Error while calling RDS Listener %r', ex)
        finally:
            # Always record the outcome, even when delivery failed.
            LOG.info('RDS Listener status %s ' % status)
            LOG.info('RDS Listener status code %s ' % status_code)
            db_api.update_target_data(template_status_id, status, **args)
class NotifierController(object):
    """Pecan controller for the ord-notifier endpoint.

    GET returns the status of a previously submitted template; POST
    validates, persists, and forwards a new template notification to the
    engine over RPC.
    """

    def __init__(self):
        super(NotifierController, self).__init__()
        # RPC client used to hand accepted notifications to the engine.
        self._rpcapi = rpcapi.RpcAPI()

    def _prepare_response_message(self, kwargs, target_data,
                                  status, error_msg=None, error_code=None):
        """Annotate *kwargs* in place with status/error fields and return it.

        Note: ``target_data`` is accepted but not used by this method.
        """
        LOG.debug("Create response body with status %s \
            code %s " % (status, error_code))
        LOG.debug("message-body %r " % kwargs)
        response_body = kwargs
        response_body['status'] = status
        if error_code is not None:
            response_body['error_msg'] = error_msg
            response_body['error_code'] = error_code
        return response_body

    def _validate_request(self, kwargs):
        """Check the request against existing DB records.

        :returns: (error_code, error_msg) -- both None when the request
            may proceed; ORD_002 when a prior submission is still in
            STATUS_SUBMITTED, ORD_001 for a duplicate template/operation.
        """
        error_code = None
        error_msg = None
        template = db_api.retrieve_template(kwargs['request_id'])
        LOG.debug('Template from DB Call %r ' % template)
        template_target = None
        if template is not None:
            template_target = db_api.retrieve_target(kwargs['request_id'])
            LOG.debug('Template target from DB Call %r ' % template_target)
        # NOTE(review): assumes retrieve_target() returns a record whenever
        # retrieve_template() did -- confirm it can never be None here.
        if template is not None:
            if template_target['status'] == \
                    utils.STATUS_SUBMITTED:
                error_code = ErrorCode.ORD_002.value
                error_msg = ErrorCode.tostring(error_code)
            elif kwargs.get('resource-template-name') == \
                    template_target.get('resource_template_name') and \
                    (template_target.get('status') == utils.STATUS_SUBMITTED or
                     template_target.get('resource_operation') ==
                     kwargs.get('resource-operation')):
                error_code = ErrorCode.ORD_001.value
                error_msg = ErrorCode.tostring(error_code)
        return error_code, error_msg

    def _persist_notification_record(self, kwargs):
        """Validate then store the notification; return the response body."""
        LOG.debug("Persist Template record to database")
        kwargs['time_stamp'] = str(datetime.datetime.now())
        error_code, error_msg = self._validate_request(kwargs)
        if error_code is not None:
            # Validation failed: do not store, report 'Not Submitted'.
            response = self._prepare_response_message(kwargs,
                                                      kwargs,
                                                      status='Not Submitted',
                                                      error_msg=error_msg,
                                                      error_code=error_code)
            return response
        db_api.create_template(kwargs)
        response = self._prepare_response_message(kwargs,
                                                  kwargs,
                                                  status='Submitted')
        return response

    def _validate_input_request(self, payload):
        """Reject any payload value containing whitespace.

        :raises: webob.exc.HTTPBadRequest on the first offending key
        """
        for key in payload:
            if " " in payload.get(key):
                LOG.debug('Input payload contain white spaces %s' %
                          str(payload))
                msg = _('%s contains white spaces') % key
                raise webob.exc.HTTPBadRequest(explanation=msg)

    @expose(generic=True)
    def ord_notifier(self, **args):
        # Generic handler: only GET/POST variants below are supported.
        raise webob.exc.HTTPNotFound

    @ord_notifier.when(method='GET', template='json')
    def ord_notifier_status(self, **vals):
        """Return the RDS-style status payload for template 'Id'."""
        template_id = vals.get('Id')
        payload = {}
        LOG.debug('Request for check Status by Id %s ' % template_id)
        template = db_api.retrieve_template(template_id)
        if template is not None:
            template_target = db_api.retrieve_target(template_id)
            payload = utils.create_rds_payload(template, template_target)
        LOG.debug('Payload for check Status by Id:%s is:%s'
                  % (template_id, payload))
        return payload

    @ord_notifier.when(method='POST', template='json')
    def ord_notifier_POST(self, **vals):
        """Accept a template notification, persist it and notify the engine.

        :raises: webob.exc.HTTPBadRequest when a required field is missing,
            contains whitespace, or the region does not match this agent
        """
        vals = vals['ord-notifier']
        request_id = vals.get('request-id')
        if request_id is None:
            msg = _("A valid request_id parameter is required")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        # FIXME(db2242) we don't process this field. So why for it here?
        resource_type = vals.get('resource-type')
        if resource_type is None:
            msg = _("A valid resource_type parameter is required")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        # FIXME(db2242): we support specific set of operation. We must check
        # that received operation is in support list.
        resource_operation = vals.get('operation')
        if resource_operation is None:
            msg = _("A valid resource_operation parameter is required")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        resource_name = vals.get('resource-template-name')
        if resource_name is None:
            msg = _("A valid resource-template-name parameter is required")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        # FIXME(db2242): why the hell we need it?
        template_version = vals.get('resource-template-version')
        # FIXME(db2242): we can handle only 'hot' or 'ansible' values here
        # Everything else must be rejected here.
        template_type = vals.get('resource-template-type')
        if template_type is None:
            template_type = utils.TEMPLATE_TYPE_HEAT
        status_id = str(uuid.uuid4())
        region = vals.get('region')
        if region is None:
            msg = _("A valid region is required")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        elif region != CONF.region:
            msg = _("Invalid region specified")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        resource_id = ''
        if 'resource-id' in vals:
            resource_id = vals.get('resource-id')
        kwargs = {
            'request_id': str(request_id),
            'resource_id': resource_id,
            'template_type': template_type,
            'resource_operation': resource_operation,
            'resource_name': resource_name,
            'resource_type': resource_type,
            'resource_template_version': template_version,
            'template_status_id': status_id,
            'status': utils.STATUS_SUBMITTED,
            'region': region
        }
        self._validate_input_request(kwargs)
        LOG.debug('Payload to DB call %r ' % kwargs)
        db_response = self._persist_notification_record(kwargs=kwargs)
        response = {}
        vals['status'] = db_response['status']
        if 'error_code' in db_response:
            vals['error-code'] = db_response['error_code']
            vals['error-msg'] = db_response['error_msg']
        response['ord-notifier-response'] = vals
        if 'error_code' not in db_response:
            # Persisted successfully: hand the work off to the engine.
            LOG.debug("----- message to Engine -----")
            LOG.debug(" message: %s \nstatus_id: %s" %
                      (str(kwargs), str(status_id)))
            LOG.debug("-----------------------------")
            payload = str(kwargs)
            try:
                ctxt = {'request_id': kwargs.get('request_id')}
                self._rpcapi.invoke_notifier_rpc(ctxt, payload)
            except messaging.MessageDeliveryFailure:
                LOG.error("Fail to deliver message")
        else:
            LOG.debug("Template submission to DB failed with %s "
                      % db_response['error_msg'])
            LOG.debug("Message to engine is not triggered")
        return response

View File

@ -0,0 +1,22 @@
#
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ord.api.controllers.v1 import api
class V1Controller(object):
    """Version 1 API controller root."""

    # Mounted at /v1/ord; requests are handled by NotifierController.
    ord = api.NotifierController()

128
ord/api/middleware.py Executable file
View File

@ -0,0 +1,128 @@
#
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Middleware to replace the plain text message body of an error
response with one formatted so the client can parse it.
Based on pecan.middleware.errordocument
"""
import json
from lxml import etree
import webob
from ord import i18n
from ord.i18n import _
from ord.openstack.common import log
LOG = log.getLogger(__name__)
class ParsableErrorMiddleware(object):
    """Replace error body with something the client can parse.

    Wraps a WSGI app; for non-2xx/3xx responses it rewrites the body into
    an ``error_message`` JSON or XML envelope based on the Accept header.
    """

    @staticmethod
    def best_match_language(accept_language):
        """Determines best available locale from the Accept-Language header.

        :returns: the best language match or None if the 'Accept-Language'
                  header was not available in the request.
        """
        if not accept_language:
            return None
        all_languages = i18n.get_available_languages()
        return accept_language.best_match(all_languages)

    def __init__(self, app):
        # The wrapped WSGI application.
        self.app = app

    def __call__(self, environ, start_response):
        # Request for this state, modified by replace_start_response()
        # and used when an error is being reported.
        state = {}

        def replacement_start_response(status, headers, exc_info=None):
            """Overrides the default response to make errors parsable."""
            try:
                status_code = int(status.split(' ')[0])
                state['status_code'] = status_code
            except (ValueError, TypeError):  # pragma: nocover
                raise Exception((
                    'ErrorDocumentMiddleware received an invalid '
                    'status %s' % status
                ))
            else:
                # NOTE(review): '/ 100' relies on Python 2 integer division;
                # under Python 3, e.g. 204/100 == 2.04 would be treated as an
                # error status -- confirm target interpreter before porting.
                if (state['status_code'] / 100) not in (2, 3):
                    # Remove some headers so we can replace them later
                    # when we have the full error message and can
                    # compute the length.
                    headers = [(h, v)
                               for (h, v) in headers
                               if h not in ('Content-Length', 'Content-Type')
                               ]
                # Save the headers in case we need to modify them.
                state['headers'] = headers
                return start_response(status, headers, exc_info)

        app_iter = self.app(environ, replacement_start_response)
        if (state['status_code'] / 100) not in (2, 3):
            req = webob.Request(environ)
            # Find the first TranslationHook in the array of hooks and use the
            # translatable_error object from it
            error = None
            # TODO(nh863p): Commenting below code since hooks are not used
            # Should be enabled once hooks are introduced
            """
            for hook in self.app.hooks:
                if isinstance(hook, hooks.TranslationHook):
                    error = hook.local_error.translatable_error
                    break
            """
            user_locale = self.best_match_language(req.accept_language)
            if (req.accept.best_match(
                    ['application/json', 'application/xml']) ==
                    'application/xml'):
                try:
                    # simple check xml is valid
                    fault = etree.fromstring('\n'.join(app_iter))
                    # Add the translated error to the xml data
                    if error is not None:
                        for fault_string in fault.findall('faultstring'):
                            fault_string.text = i18n.translate(error,
                                                               user_locale)
                    body = ['<error_message>' + etree.tostring(fault) +
                            '</error_message>']
                except etree.XMLSyntaxError as err:
                    LOG.error(_('Error parsing HTTP response: %s') % err)
                    body = ['<error_message>%s' % state['status_code'] +
                            '</error_message>']
                state['headers'].append(('Content-Type', 'application/xml'))
            else:
                try:
                    fault = json.loads('\n'.join(app_iter))
                    if error is not None and 'faultstring' in fault:
                        fault['faultstring'] = i18n.translate(error,
                                                              user_locale)
                    body = [json.dumps({'error_message': fault})]
                except ValueError as err:
                    # Body was not JSON; wrap the raw text instead.
                    body = [json.dumps({'error_message': '\n'.join(app_iter)})]
                state['headers'].append(('Content-Type', 'application/json'))
            state['headers'].append(('Content-Length', str(len(body[0]))))
        else:
            body = app_iter
        return body

0
ord/client/__init__.py Normal file
View File

158
ord/client/client.py Normal file
View File

@ -0,0 +1,158 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import client as heat
from glanceclient import client as glance
from keystoneclient import discover as keystone_discover
from keystoneclient.v2_0 import client as keystone_v2
from keystoneclient.v3 import client as keystone_v3
from oslo_config import cfg
from ord.openstack.common import log as logging
# FIXME(db2242): we definetly must change this group name. It very confusing.
OPT_GROUP = cfg.OptGroup(name='ord_credentials', title='ORD Credentials')
SERVICE_OPTS = [
cfg.StrOpt('project_id', default='',
help="project id used by ranger-agent "
"driver of service vm extension"),
cfg.StrOpt('auth_url', default='http://0.0.0.0:5000/v2.0',
help="auth URL used by ranger-agent "
"driver of service vm extension"),
cfg.StrOpt('user_name', default='',
help="user name used by ranger-agent "
"driver of service vm extension"),
cfg.StrOpt('password', default='',
help="password used by ranger-agent "
"driver of service vm extension"),
cfg.StrOpt('tenant_name', default='',
help="tenant name used by ranger-agent driver of service vm "
"extension"),
cfg.FloatOpt("openstack_client_http_timeout", default=180.0,
help="HTTP timeout for any of OpenStack service in seconds"),
cfg.BoolOpt("https_insecure", default=False,
help="Use SSL for all OpenStack API interfaces"),
cfg.StrOpt("https_cacert", default=None,
help="Path to CA server certificate for SSL")
]
cfg.CONF.register_opts(SERVICE_OPTS, OPT_GROUP)
CONF = cfg.CONF.ord_credentials
LOG = logging.getLogger(__name__)
def cached(func):
    """Memoize a client-factory method on its instance's ``cache`` dict."""
    def inner(self, *args, **kwargs):
        # Key combines the method name with stringified arguments; empty
        # args/kwargs each contribute an empty string.
        cache_key = '{0}{1}{2}'.format(func.__name__,
                                       str(args) if args else '',
                                       str(kwargs) if kwargs else '')
        try:
            return self.cache[cache_key]
        except KeyError:
            result = func(self, *args, **kwargs)
            self.cache[cache_key] = result
            return result
    return inner
def create_keystone_client(args):
    """Create a keystone client for the first API version discovered.

    :param args: dict of keystone client keyword arguments; must contain
        'auth_url'
    """
    discover = keystone_discover.Discover(auth_url=args['auth_url'])
    for version_data in discover.version_data():
        version = version_data['version']
        if version[0] <= 2:
            return keystone_v2.Client(**args)
        elif version[0] == 3:
            return keystone_v3.Client(**args)
    # NOTE(review): implicitly returns None when discovery yields no v2/v3
    # entry -- callers do not check for None; confirm this cannot happen.
class Clients(object):
    """Factory for OpenStack service clients, memoized per instance
    via the @cached decorator (keyed on method name + arguments).
    """

    def __init__(self):
        # Cache of client handles populated by @cached.
        self.cache = {}

    def clear(self):
        """Remove all cached client handles."""
        self.cache = {}

    @cached
    def keystone(self):
        """Returns keystone Client."""
        params = {
            'username': CONF.user_name,
            'password': CONF.password,
            'auth_url': CONF.auth_url,
        }
        # Prefer an explicit project id; fall back to tenant name.
        if CONF.project_id:
            params['tenant_id'] = CONF.project_id
        else:
            params['tenant_name'] = CONF.tenant_name
        client = create_keystone_client(params)
        if client.auth_ref is None:
            client.authenticate()
        return client

    @cached
    def heat(self, kc, version='1'):
        """Returns heat client for given version

        @param version: string that specifies the HEAT API version
        @return heatclient.client.Client
        """
        attempt = 1
        while attempt >= 0:
            try:
                heat_api_url = kc.service_catalog.url_for(
                    service_type='orchestration')
                auth_token = kc.auth_token
                timeout = CONF.openstack_client_http_timeout
                client = heat.Client(version,
                                     endpoint=heat_api_url,
                                     token=auth_token,
                                     timeout=timeout,
                                     insecure=CONF.https_insecure,
                                     cacert=CONF.https_cacert)
                return client, kc
            except Exception:
                # On failure refresh the keystone session and retry once.
                kc = self.keystone()
                attempt = attempt - 1
        # NOTE(review): returns None implicitly when both attempts fail --
        # callers must handle a None result; confirm.

    @cached
    def glance(self, kc, version='2'):
        """Returns glance client for given version

        @param version: string that specifies the GLANCE API version
        @return glanceclient.client.Client
        """
        attempt = 1
        while attempt >= 0:
            try:
                glance_api_url = kc.service_catalog.url_for(
                    service_type='image')
                auth_token = kc.auth_token
                timeout = CONF.openstack_client_http_timeout
                client = glance.Client(version,
                                       endpoint=glance_api_url,
                                       token=auth_token,
                                       timeout=timeout,
                                       insecure=CONF.https_insecure,
                                       cacert=CONF.https_cacert)
                return client, kc
            except Exception:
                # On failure refresh the keystone session and retry once.
                kc = self.keystone()
                attempt = attempt - 1
        # NOTE(review): returns None implicitly when both attempts fail --
        # callers must handle a None result; confirm.

190
ord/client/getrepo.py Executable file
View File

@ -0,0 +1,190 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import subprocess
import shlex
import ord.common.exceptions as excp
from oslo_config import cfg
from ord.openstack.common import log as logging
CONF = cfg.CONF
# [orm] group options controlling git repo access and retry behaviour.
ORM_OPTS = [
    cfg.StrOpt('orm_template_repo_url',
               default='',
               help='Remote repo location'),
    # NOTE(review): untyped cfg.Opt with a string default ('1') while
    # etc/ord.conf sets an integer -- consider cfg.IntOpt; confirm usage.
    cfg.Opt('repo_pull_check_wait',
            default='1',
            help='Wait Time'),
    cfg.IntOpt('resource_status_check_wait', default=10,
               help='delay in seconds between two retry call'),
    cfg.IntOpt('retry_limits',
               default=5,
               help='number of retry'),
]
cfg.CONF.register_opts(ORM_OPTS, group='orm')
LOG = logging.getLogger(__name__)
class TemplateRepoClient(object):
"""Implementation to download template from repo.
Requires minimal installation (git) and minimal upkeep.
"""
    def __init__(self, local_repo):
        """Clone git repo."""
        # Tracks whether remote setup + fetch completed; pull_template
        # retries initialization when this is falsy.
        self.git_repo_status = False
        self.git_init_repo(local_repo)
def git_init_repo(self, local_repo):
# Check if local git repo already exists
repopath = os.path.join(os.environ['HOME'], local_repo)
repo = cfg.CONF.orm.orm_template_repo_url
LOG.debug(
"%s Setting up repo initiated ...", os.path.basename(repo))
# create the git repo directory if not exists
if not os.path.isdir(repopath):
os.makedirs(repopath)
# initialize repo directory as a git repo
cmd = 'git init {0}'.format(repopath)
self.run_git('GitRepoInit', cmd)
# set remote origin
cmd = 'git -C {0} remote add origin {1}'.format(
repopath, repo)
self.git_repo_status = self.run_git('GitRepoInit', cmd)
# fetch origin
cmd = 'git -C {0} fetch origin'.format(
repopath)
self.git_repo_status = self.run_git('GitRepoInit', cmd)
LOG.debug(
"%s repo setup successfully", os.path.basename(repo))
def pull_template(self, local_repo, pathtotemplate):
"""Get template from repo.
:param local_repo: local repo name
:param pathtotemplate: path to template
"""
if not self.git_repo_status:
self.git_init_repo(local_repo)
LOG.debug("Template pull initiated ...")
workdir = os.path.join(os.environ['HOME'], local_repo)
# normalize the path before checking if file exists
templatepath = os.path.normpath(
os.path.join(workdir, pathtotemplate))
# delete previous version
if os.path.isfile(templatepath):
os.remove(templatepath)
timeout_sec = cfg.CONF.orm.resource_status_check_wait
cmd = 'git -C {0} fetch origin'.format(workdir)
self.run_git('PullTemplate', cmd, timeout_sec)
cmd = 'git -C {0} checkout FETCH_HEAD -- {1}'.format(
workdir, pathtotemplate)
self.run_git('PullTemplate', cmd, timeout_sec)
LOG.debug("Template pull completed ...")
return templatepath
def run_git(self, label, cmd, timeout_sec=None):
LOG.info("Running cmd: '%s'", cmd)
timed_out = False
retry_left = CONF.orm.retry_limits
if timeout_sec is not None:
cmd = 'timeout -k {0}s {1}s {2}'.format(timeout_sec + 5,
timeout_sec, cmd)
LOG.info('Setting cmd timeout to: %s seconds', timeout_sec)
while(retry_left > 0):
try:
process = subprocess.Popen(
shlex.split(cmd), stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
[stdout, stderr] = process.communicate()
# 124 is the return code in the shell if timeout occurred
if process.returncode == 124:
timed_out = True
LOG.critical(
"Run command '%s' exceeded the alloted"
"time of %s seconds, process was killed.",
cmd, timeout_sec)
except Exception as exception:
LOG.critical("Unexpected error running '%s'"
"exception: %s",
cmd, exception.args)
[stdout, stderr] = process.communicate()
finally:
proc_result = {}
proc_result["returncode"] = process.returncode
proc_result["stdout"] = stdout.decode("UTF-8")
proc_result["stderr"] = stderr.decode("UTF-8")
proc_result["timed_out"] = timed_out
if proc_result["returncode"] == 0 or \
proc_result["returncode"] == 128:
retry_left = 0
process.returncode = 0
else:
retry_left -= 1
LOG.warning("stderr: %s", proc_result['stderr'])
LOG.warning("Retrying cmd '%s'. Retries left: %s",
cmd, retry_left)
self.git_repo_status = True
if process.returncode != 0:
self.git_repo_status = False
self.check_git_errors(label, proc_result)
def check_git_errors(self, label, result):
stderr = result['stderr'].lower()
if result['timed_out']:
raise excp.RepoTimeoutException(label=label)
if 'service not known' in stderr:
raise excp.RepoIncorrectURL(label=label)
if 'does not exist' in stderr:
raise excp.RepoNotExist(label=label)
if 'permission denied' in stderr:
raise excp.RepoNoPermission(label=label)
if 'did not match any file(s) known to git' in stderr:
raise excp.FileNotInRepo(label=label)
# general unknown exception in case none of the above
# are the cause of the problem
raise excp.RepoUnknownException(label=label, unknown=stderr)

103
ord/client/heat.py Normal file
View File

@ -0,0 +1,103 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import exc as heat_exc
from ord.client.client import Clients
from ord.common import exceptions as exc
from ord.common import utils
class HeatClient(object):
    """Thin wrapper over the HEAT client.

    Translates heatclient exceptions into ORD integration errors.
    """

    # Keystone client cached at class level and shared by all instances.
    _kc = None

    def __init__(self):
        # FIXME(db2242): we must not cache any clients because it done(must
        # be done) by "Clients"
        try:
            if HeatClient._kc is None:
                HeatClient._kc = Clients().keystone()
        except Exception as e:
            raise exc.KeystoneInitializationException(e.message)

    def get_stacks(self):
        """Return all stacks visible to the current credentials."""
        client, self._kc = Clients().heat(self._kc)
        try:
            payload = client.stacks.list()
        except heat_exc.BaseException as e:
            raise exc.HEATIntegrationError(
                action='stacks.list', details=e.message)
        return payload

    def get_stack(self, stack_id):
        """Return a single stack by its id."""
        client, self._kc = Clients().heat(self._kc)
        try:
            payload = client.stacks.get(stack_id)
            # TODO(db2242): check behaviour in case it object not exist
        except heat_exc.BaseException as e:
            raise exc.HEATIntegrationError(
                action='stacks.get', details=e.message)
        return payload

    # TODO(db2242): check real heatclient capabilities to lookup objects
    def get_stack_by_name(self, name):
        """Return the first stack whose name equals ``name``.

        :raises exc.HEATStackLookupError: when no stack matches
        """
        for stack in self.get_stacks():
            if stack.stack_name != name:
                continue
            break
        else:
            raise exc.HEATStackLookupError(query='name={!r}'.format(name))
        return stack

    def create_stack(self, name, template):
        """Create a stack ``name`` from the template at path ``template``."""
        template = utils.load_file(template)
        client, self._kc = Clients().heat(self._kc)
        try:
            response = client.stacks.create(
                stack_name=name, template=template)
        except heat_exc.BaseException as e:
            raise exc.HEATStackCreateError(details=e.message)
        return response

    def update_stack(self, stack_id, template):
        """Update ``stack_id`` with the template at path ``template``."""
        template = utils.load_file(template)
        client, self._kc = Clients().heat(self._kc)
        try:
            response = client.stacks.update(stack_id, template=template)
        except heat_exc.BaseException as e:
            raise exc.HEATStackUpdateError(details=e.message)
        return response

    def delete_stack(self, stack_id):
        """Request deletion of stack ``stack_id``."""
        client, self._kc = Clients().heat(self._kc)
        try:
            client.stacks.delete(stack_id)
        except heat_exc.BaseException as e:
            raise exc.HEATStackDeleteError(details=e.message)

    def get_image_data_by_stackid(self, stack_id):
        """Return glance data for the image resource of a stack, or None.

        Keeps the original "last Image resource wins" behaviour, but
        creates the glance client once, after the scan.
        """
        client, self._kc = Clients().heat(self._kc)
        resources = client.resources.list(stack_id)
        image_id = None
        image_data = None
        for resource in resources:
            if utils.RESOURCE_IMAGE in resource.resource_type:
                image_id = resource.physical_resource_id
        if image_id:
            # NOTE(review): Clients().glance() returns a (client, keystone)
            # pair, like Clients().heat(); the original kept the raw tuple
            # and would crash on ".images".  Unpack it -- confirm the
            # glance() signature accepts a single kc argument.
            glance_client, self._kc = Clients().glance(self._kc)
            image_data = glance_client.images.get(image_id)
        return image_data

39
ord/client/rpcapi.py Normal file
View File

@ -0,0 +1,39 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from oslo_config import cfg
import oslo_messaging as messaging
LOG = logging.getLogger(__name__)
class RpcAPI(object):
    """Casting client for messages consumed from the ord-notifier queue."""

    def __init__(self):
        super(RpcAPI, self).__init__()
        # Build the transport/target pair once; one RPC client is reused
        # for every cast.
        self.target = messaging.Target(topic='ord-notifier-q')
        self.transport = messaging.get_transport(cfg.CONF)
        self._client = messaging.RPCClient(self.transport, self.target)

    def invoke_notifier_rpc(self, ctxt, payload):
        """Fire-and-forget cast of ``payload`` to the notifier service."""
        try:
            prepared = self._client.prepare(version='1.0')
            prepared.cast(ctxt=ctxt,
                          method='invoke_notifier_rpc',
                          payload=payload)
        except messaging.MessageDeliveryFailure:
            LOG.error("Fail to deliver message")

46
ord/client/rpcengine.py Normal file
View File

@ -0,0 +1,46 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from oslo_config import cfg
import oslo_messaging as messaging
from ord.common.exceptions import RPCInitializationException
LOG = logging.getLogger(__name__)
class RpcEngine(object):
    """Casting client for messages consumed from the ord-listener queue."""

    def __init__(self):
        super(RpcEngine, self).__init__()
        # Any failure while wiring up oslo.messaging is surfaced as an
        # RPC initialization error.
        try:
            self.target = messaging.Target(topic='ord-listener-q')
            self.transport = messaging.get_transport(cfg.CONF)
            self._client = messaging.RPCClient(self.transport, self.target)
        except Exception as exception:
            LOG.critical(
                "Unexpected error while initializing clients %s" % exception)
            raise RPCInitializationException(exception.message)

    def invoke_listener_rpc(self, ctxt, payload):
        """Fire-and-forget cast of ``payload`` to the listener service."""
        LOG.debug("invoke_listener_rpc is invoked")
        try:
            prepared = self._client.prepare(version='1.0')
            prepared.cast(ctxt=ctxt,
                          method='invoke_listener_rpc',
                          payload=payload)
        except messaging.MessageDeliveryFailure:
            LOG.error("Fail to deliver message")

22
ord/client/utils.py Normal file
View File

@ -0,0 +1,22 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
def find_resource(resource_list, name_or_id, **find_args):
    """Helper for the _find_* methods.

    Return the first resource whose attribute named by
    ``find_args['pattern']`` holds a regex matching ``name_or_id``;
    ``None`` when nothing matches.
    """
    pattern_attr = find_args['pattern']
    for candidate in resource_list:
        if re.match(candidate.__dict__[pattern_attr], name_or_id):
            return candidate
    return None

17
ord/cmd/__init__.py Normal file
View File

@ -0,0 +1,17 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Eventlet monkey patching must run before any other ord module imports
# the standard library, hence it lives in the package __init__.
import eventlet
eventlet.monkey_patch(socket=True, select=True, thread=True, time=True)

21
ord/cmd/api.py Normal file
View File

@ -0,0 +1,21 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ord.api import app
from ord import service
def main():
    """Entry point for the ord-api command: load config, run the WSGI app."""
    service.prepare_service()
    app.build_server()

21
ord/cmd/engine.py Normal file
View File

@ -0,0 +1,21 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ord.engine import app
from ord import service
def main():
    """Entry point for the ord-engine command: load config, start engine."""
    service.prepare_service()
    app.start()

View File

@ -0,0 +1,217 @@
# -*- coding:utf-8 -*-
import argparse
import sys
import time
import uuid
import json
from werkzeug import exceptions as exc
from werkzeug import routing
from werkzeug import serving
from werkzeug import wrappers
def main():
    """Run the standalone notification-catcher WSGI server.

    CLI: positional bind_port, optional --bind-address and --debug
    (debug also enables werkzeug's auto-reloader).
    """
    argp = argparse.ArgumentParser()
    argp.add_argument('bind_port', type=int,
                      help='Port number to bind to')
    argp.add_argument('--bind-address', default='127.0.0.1',
                      help='Address to bind to')
    argp.add_argument('--debug', default=False, action='store_true',
                      help='Enable debugging')
    app_args = argp.parse_args()
    app = _WSGIApplication()
    serving.run_simple(
        app_args.bind_address, app_args.bind_port, app,
        use_debugger=app_args.debug, use_reloader=app_args.debug)
class _CatcherStorage(object):
def __init__(self, stale_tout=3600):
self.stale_tout = stale_tout
self.data = list()
def add(self, payload):
self.data.append({
'time': time.time(),
'idnr': str(uuid.uuid1()),
'payload': payload})
def lookup(self, since=None):
sidx = self._lookup_slice(since=since)[0]
if sidx:
sidx += 1 # skip "last" entity
return self.data[sidx:]
def delete(self, since=None, till=None):
if not self.data:
return
sidx, eidx = self._lookup_slice(since, till)
self.data[sidx:eidx] = []
def delete_entity(self, idnr):
for idx, entity in enumerate(self.data):
if entity['idnr'] != idnr:
continue
break
else:
raise ValueError('Entity not found')
self.data.pop(idx)
def _lookup_slice(self, since=None, till=None):
sidx = 0
eidx = None
if since:
for idx, entity in enumerate(self.data):
if entity['idnr'] != since:
continue
sidx = idx
break
if till:
for idx in xrange(len(self.data) - 1, sidx - 1, -1):
entity = self.data[idx]
if entity['idnr'] != till:
continue
eidx = idx + 1
break
return sidx, eidx
def _remove_staled(self):
stale_line = time.time()
stale_line -= min(stale_line, self.stale_tout)
for idx, entity in enumerate(self.data):
if entity['time'] < stale_line:
continue
break
else:
idx = 0
self.data[:idx] = []
class _HandlerBase(object):
def __init__(self, request, path_args):
self.request = request
self.path_args = path_args
def __call__(self):
raise NotImplementedError
class _NotifierCatcher(_HandlerBase):
    """POST /ord-target: store the received notification payload."""

    def __call__(self):
        storage.add(self._fetch_payload())
        return {'op': True}

    def _fetch_payload(self):
        """Extract the notification payload from JSON or form data."""
        if self.request.content_type == 'application/json':
            return self._payload_from_json()
        return self._payload_from_form()

    def _payload_from_json(self):
        """Decode a JSON request body.

        :raises exc.BadRequest: when the body is not valid JSON
        """
        try:
            payload = json.loads(self.request.data)
        except (ValueError, TypeError) as e:
            raise exc.BadRequest('Invalid payload: {}'.format(e))
        return payload

    def _payload_from_form(self):
        payload = dict(self.request.form)
        # FIXME(db2242): ugly fix of incorrect data transfer from ORD-API
        if len(payload) != 1:
            return payload
        # next(iter(...)) instead of .keys()[0]: dict views are not
        # indexable on Python 3.
        key = next(iter(payload))
        value = payload[key]
        if value != ['']:
            return payload
        try:
            payload = json.loads(key)
        except (TypeError, ValueError):
            pass
        return payload
class _NotificationsBase(_HandlerBase):
    # Marker base class for the /api/notifications handlers.
    pass
class _NotificationsList(_NotificationsBase):
    """GET /api/notifications: list entries stored after ``last``."""

    def __call__(self):
        marker = self.request.args.get('last')
        return {'notifications': storage.lookup(since=marker)}
class _NotificationsDelete(_NotificationsBase):
    """DELETE /api/notifications: drop entries between ``start``/``end``."""

    def __call__(self):
        args = self.request.args
        storage.delete(args.get('start'), args.get('end'))
        return {'op': True}
class _NotificationsEntityDelete(_NotificationsBase):
    """DELETE /api/notifications/<idnr>: drop one entry by its id."""

    def __call__(self):
        try:
            storage.delete_entity(self.path_args['idnr'])
        except ValueError:
            # Unknown id maps to HTTP 404.
            raise exc.NotFound
        return {'op': True}
class _WSGIApplication(object):
    """Minimal werkzeug WSGI application routing URLs to handler classes."""
    # Routing table: path + HTTP method -> symbolic endpoint name.
    url_map = routing.Map([
        routing.Rule('/ord-target', endpoint='target', methods=['post']),
        routing.Rule('/api/notifications', methods=['get'],
                     endpoint='api_notify:list'),
        routing.Rule('/api/notifications', methods=['delete'],
                     endpoint='api_notify:remove'),
        routing.Rule('/api/notifications/<idnr>', methods=['delete'],
                     endpoint='api_notify-entity:remove')])
    # Symbolic endpoint name -> handler class (_HandlerBase subclasses).
    endpoint_map = {
        'target': _NotifierCatcher,
        'api_notify:list': _NotificationsList,
        'api_notify:remove': _NotificationsDelete,
        'api_notify-entity:remove': _NotificationsEntityDelete}

    def dispatch_request(self, request):
        """Match the request, invoke its handler, JSON-encode the result."""
        adapter = self.url_map.bind_to_environ(request.environ)
        try:
            endpoint, args = adapter.match()
            endpoint = self.endpoint_map[endpoint]
            view = endpoint(request, args)
            payload = view()
            payload = json.dumps(payload)
            response = wrappers.Response(payload, mimetype='application/json')
        except exc.HTTPException as e:
            # werkzeug HTTP exceptions are themselves valid WSGI responses.
            return e
        return response

    def __call__(self, environ, start_response):
        return self.wsgi_app(environ, start_response)

    def wsgi_app(self, environ, start_response):
        # Wrap the raw WSGI environ and delegate to the dispatcher.
        request = wrappers.Request(environ)
        response = self.dispatch_request(request)
        return response(environ, start_response)
# Module-level singleton shared by all request handlers.
storage = _CatcherStorage()
if __name__ == '__main__':
    sys.exit(main())

36
ord/cmd/manage.py Normal file
View File

@ -0,0 +1,36 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ord.db import migration as mig
from ord import service
def sync(version=None):
    """Sync the database up to the most recent version."""
    # The docstring above was originally placed after the first statement,
    # where it was a no-op string literal rather than a docstring.
    service.prepare_service()
    return mig.db_sync(version, database='ord')
def dbsync():
    """Sync the database up to the most recent version."""
    # Docstring moved to the proper position (it previously followed the
    # first statement and was therefore not a docstring at all).
    service.prepare_service()
    return mig.db_sync(version=None, database='ord')
def version():
    """Print the current database version."""
    # Docstring moved to the proper position (it previously followed the
    # first statement and was therefore not a docstring at all).
    service.prepare_service()
    print(mig.db_version(database='ord'))

0
ord/common/__init__.py Normal file
View File

274
ord/common/exceptions.py Executable file
View File

@ -0,0 +1,274 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
# ORD/RDS error_codes with description. This error codes used into
# responses/notifications designed for ORM/RDS system.
#
# ORD_000: Stack Creation Failed
# ORD_001: Template already exists (API)
# ORD_002: Template already submitted and in process (API)
# ORD_003: Template submission timed out (unused)
# ORD_004: Template submission failed (unused)
# ORD_005: Template pull timeout (unused - removed with timeout logic)
# ORD_006: Unsupported operation (shared with unsupported template type)
# ORD_007: Thread not found (unused - never returned to external apps)
# ORD_008: Unknown Exception
# ORD_009: Stack Modification Failed
# ORD_010: Stack Deletion Failed
# ORD_011: Not Able to Retrieve Stack Status (can't be used, covered by
# other errors)
# ORD_012: Stack not found
# ORD_013: Stack Time Out Exception (unused - removed with timeout logic)
# ORD_014: Template not found
# ORD_015: Stack create failed and delete completed
SUCCESS_CODE = ''
ERROR_HEAT_STACK_CREATE = 'ORD_000'
ERROR_TEMPLATE_NOT_FOUND = 'ORD_005'
ERROR_UNSUPPORTED_OPERATION = 'ORD_006'
ERROR_UNKNOWN_EXCEPTION = 'ORD_008'
ERROR_HEAT_STACK_UPDATE = 'ORD_009'
ERROR_HEAT_STACK_DELETE = 'ORD_010'
ERROR_HEAT_STACK_LOOKUP = 'ORD_012'
ERROR_TIMEOUT = 'ORD_013'
ERROR_KEYSTONE_INIT = 'ORD_016'
ERROR_CLIENT_INIT = 'ORD_017'
ERROR_REPO_INIT = 'ORD_018'
ERROR_RPC_INIT = 'ORD_019'
ERROR_REPO_TIMEOUT = 'ORD_020'
ERROR_REPO_URL = 'ORD_021'
ERROR_REPO_NOT_EXIST = 'ORD_022'
ERROR_REPO_PERMISSION = 'ORD_023'
ERROR_REPO_UNKNOWN = 'ORD_024'
ERROR_FILE_NOT_IN_REPO = 'ORD_025'
ERROR_STACK_ROLLBACK = 'ORD_015'
ERROR_CODELESS = 'ORD_XXX'
@six.add_metaclass(abc.ABCMeta)
class ORDException(Exception):
    """Base Ord Exception

    The message is assembled from the subclass ``message_template``
    (a str.format template) using ``default_substitution_values``
    merged along the MRO and overlaid with constructor keyword args.
    """
    # ORM/RDS error code attached to this exception family.
    error_code = ERROR_CODELESS
    # Per-class default values for the message template placeholders.
    default_substitution_values = dict()

    @property
    def message(self):
        # Assembled, human-readable message (first positional arg).
        return self.args[0]

    @property
    def arguments(self):
        # Copy of the substitution values the message was built from
        # (empty when the exception was raised with positional args).
        try:
            values = self.args[1]
        except IndexError:
            values = {}
        return values.copy()

    @property
    def substitution_values(self):
        # Merge default_substitution_values from every class in the MRO
        # (base classes first), then any instance-level overrides.
        values = dict()
        for cls in reversed(type(self).__mro__):
            try:
                values.update(cls.default_substitution_values)
            except AttributeError:
                pass
        try:
            values.update(self.__dict__['default_substitution_values'])
        except KeyError:
            pass
        return values

    @abc.abstractproperty
    def message_template(self):
        """Force subclasses to define 'message_template' attribute."""

    def __init__(self, *args, **kwargs):
        # Positional args bypass template handling (pre-built message);
        # keyword args are template substitutions.  Mixing both is an error.
        if args and kwargs:
            raise TypeError(
                'You must not use *args and **kwargs in {!r}'.format(
                    type(self)))
        if args:
            super(ORDException, self).__init__(*args)
            return
        arguments = self.substitution_values
        arguments.update(kwargs)
        try:
            message = self.message_template.format(**arguments)
        except (KeyError, IndexError, AttributeError) as e:
            raise TypeError('Unable to assemble error message. Error: {}. '
                            'Template: {}'.format(e, self.message_template))
        super(ORDException, self).__init__(message, arguments)

    def clone(self, **kwargs):
        # Rebuild the same exception type with updated substitutions.
        try:
            arguments = self.args[1]
        except IndexError:
            arguments = dict()
        arguments.update(kwargs)
        return type(self)(**arguments)
# Generic internal failure with no external error code.
class InternalError(ORDException):
    message_template = 'Internal error'


# Failures while talking to external services (HEAT, glance, ...).
class IntegrationError(ORDException):
    message_template = ('Error during interaction with external service: '
                        '{details}')
    default_substitution_values = {
        'details': 'there is no details about this error'}


class HEATIntegrationError(IntegrationError):
    message_template = ('Error during interaction with HEAT: '
                        '{action} - {details}')
    default_substitution_values = {
        'action': '(undef)'}


class HEATStackCreateError(HEATIntegrationError):
    error_code = ERROR_HEAT_STACK_CREATE
    default_substitution_values = {
        'action': 'stacks.create'}


class HEATStackUpdateError(HEATIntegrationError):
    error_code = ERROR_HEAT_STACK_UPDATE
    default_substitution_values = {
        'action': 'stacks.update'}


class HEATStackDeleteError(HEATIntegrationError):
    error_code = ERROR_HEAT_STACK_DELETE
    default_substitution_values = {
        'action': 'stacks.delete'}


class HEATLookupError(HEATIntegrationError):
    message_template = 'HEAT {object} not found. Query by {query}'


class HEATStackLookupError(HEATLookupError):
    error_code = ERROR_HEAT_STACK_LOOKUP
    default_substitution_values = {
        'object': 'stack'}
class UnsupportedOperationError(ORDException):
    error_code = ERROR_UNSUPPORTED_OPERATION
    message_template = 'Got unsupported operation {operation!r}'


# Shares ORD_006 with its parent (see the module-level code table).
class UnsupportedTemplateTypeError(UnsupportedOperationError):
    message_template = 'Got unsupported template type {template!r}'


class StackOperationError(ORDException):
    message_template = ('Not able to perform {operation} operation for '
                        '{stack} stack.')


class PullTemplateOperationError(ORDException):
    error_code = ERROR_TEMPLATE_NOT_FOUND
    message_template = 'Failed to fetch template {name}.'


class StackTimeoutError(ORDException):
    error_code = ERROR_TIMEOUT
    message_template = ('Timeout: Not able to perform {operation} operation '
                        'for {stack} stack.')


class WorkerThreadError(ORDException):
    message_template = 'Worker Thread ({thread_id}) was not initiated.'


# Raised when stack creation failed and the rollback delete ran.
class StackRollbackError(ORDException):
    error_code = ERROR_STACK_ROLLBACK
    message_template = ('Unable to create stack {error.message}. Rollback '
                        'status: {rollback_status} - {rollback_message}')
    default_substitution_values = {
        'rollback_status': None,
        'rollback_message': None}
class KeystoneInitializationException(ORDException):
    error_code = ERROR_KEYSTONE_INIT
    message_template = 'Keystone authentication failed'


class ClientInitializationException(ORDException):
    error_code = ERROR_CLIENT_INIT
    message_template = 'Failed to initialize Heat'


# Git-repo related failures; {label} identifies the calling operation.
class RepoTimeoutException(ORDException):
    error_code = ERROR_REPO_TIMEOUT
    message_template = '[{label}] '\
        'Timeout occurred while trying to connect to GIT repo'


class RepoIncorrectURL(ORDException):
    error_code = ERROR_REPO_URL
    message_template = '[{label}] An error occurred with the GIT repo url. ' \
        'Check conf file to confirm URL'


class RepoNotExist(ORDException):
    error_code = ERROR_REPO_NOT_EXIST
    message_template = '[{label}] '\
        'Git repo is incorrect or does not exist'


class FileNotInRepo(ORDException):
    error_code = ERROR_FILE_NOT_IN_REPO
    message_template = '[{label}] '\
        'File does not exist in this Git repo'


class RepoNoPermission(ORDException):
    error_code = ERROR_REPO_PERMISSION
    message_template = '[{label}] '\
        'Permission denied to repo. Check SSH keys'


class RepoUnknownException(ORDException):
    error_code = ERROR_REPO_UNKNOWN
    message_template = '[{label}] '\
        'An unknown repo exception occurred - {unknown}'


class RepoInitializationException(ORDException):
    error_code = ERROR_REPO_INIT
    message_template = 'Failed to connect and download repo'


class RPCInitializationException(ORDException):
    error_code = ERROR_RPC_INIT
    message_template = 'Failed to initialize RPC'


class RDSListenerHTTPError(ORDException):
    message_template = 'RDS listener connection error.'

182
ord/common/utils.py Normal file
View File

@ -0,0 +1,182 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from enum import Enum
import multiprocessing
import six
from ord.common import exceptions as exc
OPERATION_CREATE = 'create'
OPERATION_MODIFY = 'modify'
OPERATION_DELETE = 'delete'
STATUS_SUBMITTED = 'Submitted'
STATUS_ERROR = 'Error'
STATUS_INTERNAL_ERROR = 'Error'
STATUS_SUCCESS = 'Success'
STATUS_RDS_ERROR = 'Error_RDS_Dispatch'
STATUS_RDS_SUCCESS = 'Success_RDS_Dispatch'
TEMPLATE_TYPE_HEAT = 'hot'
TEMPLATE_TYPE_ANSIBLE = 'ansible'
RESOURCE_IMAGE = 'Image'
def load_file(name):
    """Read the whole text file ``name`` and return its content.

    :param name: path of the file to read
    :returns: file content as a string
    :raises exc.InternalError: when the file cannot be opened or read
    """
    try:
        # with-statement: the original left the descriptor open on return.
        with open(name, 'rt') as fd:
            payload = fd.read()
    except IOError as e:
        # str(e) instead of e.message: IOError has no .message on Python 3.
        raise exc.InternalError(
            'Can\'t load {!r}: {}'.format(e.filename, str(e)))
    return payload
def printable_time_interval(delay, show_ms=False):
    """Format a duration given in seconds as a compact human string.

    Examples: 65 -> '1m 5s'; 1.5 with show_ms -> '1s 500ms'.
    An empty/zero duration renders as '0ms'.
    """
    labels = ['ms', 's', 'm', 'h', 'd']
    values = []
    for step in (1, 60, 60, 24):
        if not delay:
            break
        values.append(delay % step)
        delay //= step
    if delay:
        # whatever is left after days stays as a day count
        values.append(delay)
    if values:
        # slot 0 carries the sub-second remainder; scale to milliseconds
        values[0] *= 1000
        if not show_ms:
            values.pop(0)
            labels.pop(0)
    pieces = ['{}{}'.format(int(v), s) for v, s in zip(values, labels)]
    result = ' '.join(reversed(pieces))
    return result if result else '0ms'
def cpu_count():
    """Number of available CPUs, falling back to 1 when undetectable."""
    try:
        count = multiprocessing.cpu_count()
    except NotImplementedError:
        return 1
    return count or 1
# FIXME(db2242): unused!
def update_nested(original_dict, updates):
    """Updates the leaf nodes in a nest dict.

    Updates occur without replacing entire sub-dicts; the input dict is
    left untouched and a deep-copied, merged dict is returned.
    """
    merged = copy.deepcopy(original_dict)
    for key, value in updates.items():
        if isinstance(value, dict):
            # recurse so sibling keys of the sub-dict survive
            merged[key] = update_nested(merged.get(key, {}), value)
        else:
            merged[key] = value
    return merged
def create_rds_payload(template, template_target):
    """Build the rds-listener notification body.

    :param template: template record (request/region/operation fields)
    :param template_target: target record (status/error fields)
    :returns: dict with a single 'rds-listener' key
    """
    listener = {
        'request-id': template.get('request_id'),
        'resource-id': template.get('resource_id'),
        'resource-type': template_target.get('resource_type'),
        'resource-template-version':
            template_target.get('resource_template_version'),
        'resource-template-type': template.get('template_type'),
        'resource-operation': template.get('resource_operation'),
        'ord-notifier-id': template_target.get('template_status_id'),
        'region': template.get('region'),
        'status': template_target.get('status'),
        'error-code': template_target.get('error_code'),
        'error-msg': template_target.get('error_msg'),
    }
    return {'rds-listener': listener}
# FIXME(db2242): remove it
class ErrorCode(Enum):
    """Legacy string-valued error-code table.

    Superseded by the error codes in ord.common.exceptions; kept only
    until remaining callers are migrated.
    """
    ORD_NOERROR = ""
    ORD_000 = "ORD_000"
    ORD_001 = "ORD_001"
    ORD_002 = "ORD_002"
    ORD_003 = "ORD_003"
    ORD_004 = "ORD_004"
    ORD_005 = "ORD_005"
    ORD_006 = "ORD_006"
    ORD_007 = "ORD_007"
    ORD_008 = "ORD_008"
    ORD_009 = "ORD_009"
    ORD_010 = "ORD_010"
    ORD_011 = "ORD_011"
    ORD_012 = "ORD_012"
    ORD_013 = "ORD_013"
    ORD_014 = "ORD_014"
    ORD_015 = "ORD_015"
    ORD_016 = "ORD_016"
    ORD_017 = "ORD_017"
    ORD_018 = "ORD_018"
    ORD_019 = "ORD_019"

    # NOTE(review): nonstandard __getattr__ on an Enum -- "code in self"
    # relies on unusual membership semantics; verify callers before reuse.
    def __getattr__(self, code):
        if code in self:
            return code
        raise AttributeError

    @classmethod
    def tostring(cls, errorCode):
        """Translate an error-code string into its human description."""
        ord_err = {'ORD_000': 'Stack Creation Failed',
                   'ORD_001': 'Template already exists',
                   'ORD_002': 'Template already submitted and in process',
                   'ORD_003': 'Template submission timed out',
                   'ORD_004': 'Template submission failed',
                   'ORD_005': 'Unable to pull Template',
                   'ORD_006': 'Unsupported operation',
                   'ORD_007': 'Thread not found',
                   'ORD_008': 'Unknown Exception',
                   'ORD_009': 'Stack Modification Failed',
                   'ORD_010': 'Stack Deletion Failed',
                   'ORD_011': 'Not Able to Retrieve Stack Status',
                   'ORD_012': 'Stack not found',
                   'ORD_013': 'Stack Time Out Exception',
                   'ORD_014': 'Template not found',
                   'ORD_015': 'Stack create failed and delete completed',
                   'ORD_016': 'Keystone failed to initialize',
                   'ORD_017': 'Clients failed to initialize',
                   'ORD_018': 'Failed to initialize and download repo',
                   'ORD_019': 'Fail to communicate to message broker',
                   'ORD_NOERROR': ''}
        # Unbound dict.get call; returns None for unknown codes.
        return dict.get(ord_err, errorCode)

0
ord/db/__init__.py Normal file
View File

62
ord/db/api.py Normal file
View File

@ -0,0 +1,62 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Defines interface for DB access.
Functions in this module are imported into the ranger-agent.db namespace.
Call these functions from ranger-agent.db namespace, not the
ranger-agent.db.api namespace.
All functions in this module return objects that implement a dictionary-like
interface. Currently, many of these objects are sqlalchemy objects that
implement a dictionary interface. However, a future goal is to have all of
these objects be simple dictionaries.
"""
from oslo_config import cfg
from oslo_db import concurrency
from oslo_log import log as logging
CONF = cfg.CONF
_BACKEND_MAPPING = {'sqlalchemy': 'ord.db.sqlalchemy.api'}
IMPL = concurrency.TpoolDbapiWrapper(CONF, backend_mapping=_BACKEND_MAPPING)
LOG = logging.getLogger(__name__)
def create_template(*values):
return IMPL.create_template(*values)
def retrieve_template(request_id):
    """Fetch the notification row for `request_id` from the backend."""
    row = IMPL.retrieve_template(request_id)
    return row
def retrieve_target(request_id):
    """Fetch the target-resource row for `request_id` from the backend."""
    row = IMPL.retrieve_target(request_id)
    return row
def retrieve_target_by_status(template_status_id):
    """Fetch a target-resource row keyed by its template_status_id.

    Bug fix: this wrapper previously delegated to IMPL.retrieve_target(),
    which filters on request_id, so lookups by status id silently queried
    the wrong column. It now calls the backend's retrieve_target_by_status()
    (defined in ord.db.sqlalchemy.api), which filters on template_status_id.
    """
    return IMPL.retrieve_target_by_status(template_status_id)
def update_target_data(template_status_id, status,
                       error_code, error_msg):
    """Delegate a target-resource status/error update to the backend."""
    return IMPL.update_target_data(
        template_status_id, status, error_code, error_msg)

35
ord/db/base.py Normal file
View File

@ -0,0 +1,35 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for classes that need modular database access."""
from oslo_config import cfg
from oslo_utils import importutils
db_driver_opt = cfg.StrOpt('db_driver',
default='ord.db',
help='The driver to use for database access')
CONF = cfg.CONF
CONF.register_opt(db_driver_opt)
class Base(object):
    """Mixin that loads a DB driver module; the driver is injected in init."""

    def __init__(self, db_driver=None):
        """Import the DB driver module, defaulting to CONF.db_driver."""
        super(Base, self).__init__()
        driver_name = db_driver or CONF.db_driver
        self.db = importutils.import_module(driver_name)

34
ord/db/migration.py Normal file
View File

@ -0,0 +1,34 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Database setup and migration commands."""
from ord.db.sqlalchemy import migration
IMPL = migration
def db_sync(version=None, database='ord'):
    """Bring `database` to `version`; None means the newest migration."""
    result = IMPL.db_sync(version=version, database=database)
    return result
def db_version(database='ord'):
    """Report the schema version `database` currently sits at."""
    current = IMPL.db_version(database=database)
    return current
def db_initial_version(database='ord'):
    """Report the version a freshly-controlled `database` starts from."""
    initial = IMPL.db_initial_version(database=database)
    return initial

51
ord/db/model_base.py Normal file
View File

@ -0,0 +1,51 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.ext import declarative
from sqlalchemy import orm
from oslo.db.sqlalchemy import models
class ORDBase(models.ModelBase):
    """Base class for ORD Models.

    Adds (column_name, value) iteration and a debug-friendly __repr__ on
    top of oslo's ModelBase.
    """
    __table_args__ = {'mysql_engine': 'InnoDB'}

    def __iter__(self):
        # The instance itself acts as the iterator over its mapped columns.
        self._i = iter(orm.object_mapper(self).columns)
        return self

    def next(self):
        """Return the next (column_name, value) pair.

        Fix: use the builtin next() instead of the Python 2-only
        ``.next()`` iterator method so iteration also works on Python 3.
        """
        n = next(self._i).name
        return n, getattr(self, n)

    # Python 3 iterator protocol alias.
    __next__ = next

    def __repr__(self):
        """sqlalchemy based automatic __repr__ method."""
        items = ['%s=%r' % (col.name, getattr(self, col.name))
                 for col in self.__table__.columns]
        return "<%s.%s[object at %x] {%s}>" % (self.__class__.__module__,
                                               self.__class__.__name__,
                                               id(self), ', '.join(items))
class ORDBaseV2(ORDBase):
    # Declarative mixin that derives the table name from the class name.
    @declarative.declared_attr
    def __tablename__(cls):
        # NOTE(jkoelker) use the pluralized name of the class as the table
        return cls.__name__.lower() + 's'
BASEV2 = declarative.declarative_base(cls=ORDBaseV2)

View File

204
ord/db/sqlalchemy/api.py Normal file
View File

@ -0,0 +1,204 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of SQLAlchemy backend."""
import sys
import threading
from ord.db.sqlalchemy import models
from oslo_config import cfg
from oslo_db import options as oslo_db_options
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import utils as sqlalchemyutils
from oslo_log import log as logging
CONF = cfg.CONF
# Extra [database] options registered on top of oslo.db's standard set.
api_db_opts = [
    cfg.StrOpt('db_connection',
               help='The SQLAlchemy connection string to use to connect to '
                    'the ORD database.',
               secret=True),
    cfg.StrOpt('mysql_sql_mode',
               default='TRADITIONAL',
               help='The SQL mode to be used for MySQL sessions. '
                    'This option, including the default, overrides any '
                    'server-set SQL mode. To use whatever SQL mode '
                    'is set by the server configuration, '
                    'set this to no value. Example: mysql_sql_mode='),
]
opt_group = cfg.OptGroup(name='database',
                         title='Options for the database service')
CONF.register_group(opt_group)
CONF.register_opts(oslo_db_options.database_opts, opt_group)
CONF.register_opts(api_db_opts, opt_group)
LOG = logging.getLogger(__name__)
# Lazily-created EngineFacade cache keyed by facade name, guarded by _LOCK.
_ENGINE_FACADE = {'ord': None}
_ORD_API_FACADE = 'ord'
_LOCK = threading.Lock()
def _create_facade(conf_group):
    """Build a new EngineFacade from the [database] config group."""
    facade_kwargs = {
        'sql_connection': conf_group.db_connection,
        'autocommit': True,
        'expire_on_commit': False,
        'mysql_sql_mode': conf_group.mysql_sql_mode,
        'idle_timeout': conf_group.idle_timeout,
        'connection_debug': conf_group.connection_debug,
        'connection_trace': conf_group.connection_trace,
        'max_retries': conf_group.max_retries,
    }
    return db_session.EngineFacade(**facade_kwargs)
def _create_facade_lazily(facade, conf_group):
    """Return the cached EngineFacade for `facade`, creating it on first use.

    Uses double-checked locking: the common already-created path avoids the
    lock, while only one thread ever builds the facade.
    """
    global _LOCK, _ENGINE_FACADE
    if _ENGINE_FACADE[facade] is None:
        with _LOCK:
            # Re-check under the lock: another thread may have created the
            # facade between the first check and lock acquisition.
            if _ENGINE_FACADE[facade] is None:
                _ENGINE_FACADE[facade] = _create_facade(conf_group)
    return _ENGINE_FACADE[facade]
def get_engine(use_slave=False):
    """Return the shared 'ord' facade's engine (optionally the slave)."""
    facade = _create_facade_lazily(_ORD_API_FACADE, CONF.database)
    return facade.get_engine(use_slave=use_slave)
def get_api_engine():
    """Return the shared 'ord' facade's engine for API use."""
    facade = _create_facade_lazily(_ORD_API_FACADE, CONF.database)
    return facade.get_engine()
def get_session(use_slave=False, **kwargs):
    """Return a new session from the shared 'ord' facade."""
    facade = _create_facade_lazily(_ORD_API_FACADE, CONF.database)
    return facade.get_session(use_slave=use_slave, **kwargs)
def get_backend():
    """The backend is this module itself."""
    backend_module = sys.modules[__name__]
    return backend_module
def create_template(values):
    """Persist an ord_notification row plus its initial target_resource row.

    :param values: dict carrying both the notification fields and the
        target-resource fields (template_status_id, resource_name, ...).
    """
    LOG.debug('Create Template : %r', values)
    session = get_session()
    # Both inserts happen inside one transaction.
    with session.begin():
        template_ref = models.Ord_Notification()
        template_ref.update(values)
        template_ref.save(session=session)
        error_code = None
        error_msg = None
        # error_code/error_msg are optional; when error_code is present the
        # caller is expected to have supplied error_msg as well.
        if 'error_code' in values:
            error_code = values['error_code']
            error_msg = values['error_msg']
        set_target_data(template_ref,
                        values['template_status_id'],
                        values['resource_name'],
                        values['resource_type'],
                        values['resource_template_version'],
                        values['status'],
                        error_code,
                        error_msg,
                        session)
def create_target(values, session=None):
    """Insert a new target_resource row built from `values`."""
    target = models.Target_Resource()
    target.update(values)
    target.save(session=session)
def set_target_data(template_ref, template_status_id,
                    resource_name, resource_type,
                    resource_template_version, status,
                    error_code, error_msg, session):
    """Create the target_resource row tied to the given notification row."""
    row = dict(template_status_id=template_status_id,
               request_id=template_ref.request_id,
               resource_name=resource_name,
               resource_template_version=resource_template_version,
               resource_type=resource_type,
               status=status,
               error_code=error_code,
               error_msg=error_msg)
    create_target(row, session)
def model_query(model,
                args=None,
                session=None):
    """Query helper
    :param model: Model to query. Must be a subclass of ModelBase.
    :param args: Arguments to query. If None - model is used.
    :param session: If present, the session to use.
    """
    active_session = session if session is not None else get_session()
    return sqlalchemyutils.model_query(model, active_session, args)
def update_target_data(template_status_id, status,
                       error_code=None, error_msg=None):
    """Update status/error fields of one target_resource row.

    :param template_status_id: primary key of the row to update
    :param status: new status value
    :param error_code: optional error code to record
    :param error_msg: optional error message, truncated to fit the
        255-character error_msg column
    """
    # Pass lazy %-style args so the logger skips formatting when DEBUG is
    # disabled (the original formatted eagerly with the % operator).
    LOG.debug('Update status of %s to %s', template_status_id, status)
    if error_msg:
        error_msg = error_msg[:255]
    session = get_session()
    with session.begin():
        query = model_query(models.Target_Resource, session=session)
        query = query.filter_by(template_status_id=template_status_id)
        query.update({'status': status,
                      'error_code': error_code,
                      'error_msg': error_msg})
def retrieve_template(request_id):
    """Return the ord_notification row for `request_id`, or None."""
    LOG.debug('Retrieve Notification By %s', request_id)
    query = model_query(models.Ord_Notification, session=get_session())
    return query.filter_by(request_id=request_id).first()
def retrieve_target_by_status(template_status_id):
    """Return the target_resource row for `template_status_id`, or None."""
    LOG.debug('Retrieve Target data %s by status id', template_status_id)
    query = model_query(models.Target_Resource, session=get_session())
    return query.filter_by(template_status_id=template_status_id).first()
def retrieve_target(request_id):
    """Return the target_resource row for `request_id`, or None."""
    LOG.debug('Retrieve Target data %s', request_id)
    query = model_query(models.Target_Resource, session=get_session())
    return query.filter_by(request_id=request_id).first()

View File

@ -0,0 +1,20 @@
#!/usr/bin/env python
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.versioning.shell import main
# Bug fix: the guard compared against '__ord__', which can never match, so
# running this script did nothing. The standard entry-point guard is
# '__main__'.
if __name__ == '__main__':
    main(debug='False', repository='.')

View File

@ -0,0 +1,20 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=ord
# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version
# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]

View File

@ -0,0 +1,89 @@
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import dialects
from sqlalchemy import ForeignKeyConstraint, MetaData, String, Table
from sqlalchemy import Text
LOG = logging.getLogger(__name__)
# Note on the autoincrement flag: this is defaulted for primary key columns
# of integral type, so is no longer set explicitly in such cases.
def MediumText():
    """Text column that renders as MEDIUMTEXT on MySQL, plain Text elsewhere."""
    mysql_variant = dialects.mysql.MEDIUMTEXT()
    return Text().with_variant(mysql_variant, 'mysql')
def upgrade(migrate_engine):
    """Create the initial ord_notification and target_resource tables.

    Called by sqlalchemy-migrate with the live engine. On MySQL the
    migrate_version table and the whole database are forced to UTF8.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    # Parent table: one row per incoming notification.
    ord_notification = Table('ord_notification', meta,
                             Column(
                                 'request_id', String(length=50),
                                 primary_key=True, nullable=False),
                             Column('resource_id', String(length=80)),
                             Column('template_type', String(length=50)),
                             Column('resource_operation', String(length=20)),
                             Column('region', String(length=32)),
                             Column('time_stamp', DateTime(timezone=False)),
                             mysql_engine='InnoDB',
                             mysql_charset='utf8'
                             )
    # Child table: per-resource status, linked to its notification by
    # request_id.
    target_resource = Table('target_resource', meta,
                            Column('template_status_id', String(
                                length=50), primary_key=True, nullable=False),
                            Column('request_id', String(length=50)),
                            Column('resource_template_version',
                                   String(length=50)),
                            Column('resource_name', String(length=80)),
                            Column('resource_type', String(length=50)),
                            Column('status', String(length=32),
                                   nullable=False),
                            Column('error_code', String(length=32)),
                            Column('error_msg', String(length=255)),
                            ForeignKeyConstraint(
                                ['request_id'],
                                ['ord_notification.request_id']),
                            mysql_engine='InnoDB',
                            mysql_charset='utf8'
                            )
    # Create the parent before the child so the FK target exists.
    tables = [ord_notification, target_resource]
    for table in tables:
        try:
            table.create()
        except Exception:
            LOG.info(repr(table))
            LOG.exception('Exception while creating table.')
            raise
    if migrate_engine.name == 'mysql':
        # In Folsom we explicitly converted migrate_version to UTF8.
        migrate_engine.execute(
            'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8')
        # Set default DB charset to UTF8.
        migrate_engine.execute(
            'ALTER DATABASE %s DEFAULT CHARACTER SET utf8' %
            migrate_engine.url.database)
def downgrade(migrate_engine):
    # Downgrades from the initial schema are intentionally unsupported.
    raise NotImplementedError('Downgrade is not implemented.')

View File

@ -0,0 +1,90 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sqlalchemy
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
from ord.db.sqlalchemy import api as db_session
from oslo_log import log as logging
INIT_VERSION = {}
INIT_VERSION['ord'] = 0
_REPOSITORY = {}
LOG = logging.getLogger(__name__)
def get_engine(database='ord'):
    """Return the SQLAlchemy engine (the `database` argument is unused)."""
    engine = db_session.get_engine()
    return engine
def db_sync(version=None, database='ord'):
    """Upgrade (or downgrade) `database` to `version`.

    :param version: target schema version; None means "latest"
    :param database: logical database name (only 'ord' is defined)
    :raises ValueError: if `version` is not convertible to an integer
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError as exc:
            LOG.exception(exc)
            # Bug fix: previously the error was only logged (the raise was
            # commented out), letting a non-numeric version string fall
            # through to the comparison below. Fail fast instead.
            raise ValueError("version should be an integer")
    current_version = db_version(database)
    repository = _find_migrate_repo(database)
    # Upgrade when no target is given or the target is ahead of the current
    # version; otherwise downgrade.
    if version is None or version > current_version:
        return versioning_api.upgrade(get_engine(database), repository,
                                      version)
    else:
        return versioning_api.downgrade(get_engine(database), repository,
                                        version)
def db_version(database='ord'):
    """Return the current migration version of `database`.

    If the database is not yet under version control and is empty, it is
    placed under version control at the initial version first.
    NOTE(review): when the database has tables but is not version
    controlled, this logs the error and implicitly returns None; callers
    such as db_sync then compare against None -- confirm this is intended.
    """
    repository = _find_migrate_repo(database)
    try:
        return versioning_api.db_version(get_engine(database), repository)
    except versioning_exceptions.DatabaseNotControlledError as exc:
        meta = sqlalchemy.MetaData()
        engine = get_engine(database)
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0:
            # Fresh, empty database: start version control from scratch.
            db_version_control(INIT_VERSION[database], database)
            return versioning_api.db_version(get_engine(database), repository)
        else:
            LOG.exception(exc)
def db_initial_version(database='ord'):
    """Return the version a freshly-controlled `database` starts from."""
    initial = INIT_VERSION[database]
    return initial
def db_version_control(version=None, database='ord'):
    """Put `database` under migrate version control at `version`."""
    repo = _find_migrate_repo(database)
    versioning_api.version_control(get_engine(database), repo, version)
    return version
def _find_migrate_repo(database='ord'):
    """Get the path for the migrate repository."""
    global _REPOSITORY
    path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'migrate_repo')
    assert os.path.exists(path)
    # Cache one Repository object per logical database name.
    if _REPOSITORY.get(database) is None:
        _REPOSITORY[database] = Repository(path)
    return _REPOSITORY[database]

115
ord/db/sqlalchemy/models.py Normal file
View File

@ -0,0 +1,115 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for ranger-agent data.
"""
import datetime
import uuid
from sqlalchemy import (Column, DateTime, String)
from sqlalchemy import ForeignKey, Text
from sqlalchemy import orm
from sqlalchemy.dialects.mysql import MEDIUMTEXT
from sqlalchemy.ext.declarative import declarative_base
from oslo_config import cfg
from oslo_db.sqlalchemy import models
CONF = cfg.CONF
BASE = declarative_base()
def MediumText():
    """Text column rendered as MEDIUMTEXT on MySQL back ends."""
    mysql_variant = MEDIUMTEXT()
    return Text().with_variant(mysql_variant, 'mysql')
class ORDBase(models.ModelBase):
    """Common base for ORD SQLAlchemy models: safe copy, save, and repr."""
    # Each declarative base supplies its own metadata.
    metadata = None

    def __copy__(self):
        """Implement a safe copy.copy().
        SQLAlchemy-mapped objects travel with an object
        called an InstanceState, which is pegged to that object
        specifically and tracks everything about that object. It's
        critical within all attribute operations, including gets
        and deferred loading. This object definitely cannot be
        shared among two instances, and must be handled.
        The copy routine here makes use of session.merge() which
        already essentially implements a "copy" style of operation,
        which produces a new instance with a new InstanceState and copies
        all the data along mapped attributes without using any SQL.
        The mode we are using here has the caveat that the given object
        must be "clean", e.g. that it has no database-loaded state
        that has been updated and not flushed. This is a good thing,
        as creating a copy of an object including non-flushed, pending
        database state is probably not a good idea; neither represents
        what the actual row looks like, and only one should be flushed.
        """
        session = orm.Session()
        copy = session.merge(self, load=False)
        session.expunge(copy)
        return copy

    def save(self, session=None):
        """Persist this row, defaulting to a session from ord.db.sqlalchemy.api."""
        # Imported here to avoid a circular import with the api module.
        from ord.db.sqlalchemy import api
        if session is None:
            session = api.get_session()
        super(ORDBase, self).save(session=session)

    def __repr__(self):
        """sqlalchemy based automatic __repr__ method."""
        items = ['%s=%r' % (col.name, getattr(self, col.name))
                 for col in self.__table__.columns]
        return "<%s.%s[object at %x] {%s}>" % (self.__class__.__module__,
                                               self.__class__.__name__,
                                               id(self), ', '.join(items))
class Ord_Notification(BASE, ORDBase):
    """Row describing an incoming template notification."""
    __tablename__ = 'ord_notification'
    request_id = Column(String(50), primary_key=True, nullable=False)
    resource_id = Column(String(80))
    template_type = Column(String(50), default='hot')
    resource_operation = Column(String(20))
    region = Column(String(32))
    # Bug fix: pass the callable, not its result. `datetime.datetime.now()`
    # was evaluated once at import time, stamping every row with the module
    # load time; passing the function makes SQLAlchemy call it per INSERT.
    time_stamp = Column(DateTime(timezone=False),
                        default=datetime.datetime.now)
class Target_Resource(BASE, ORDBase):
    """Per-resource status row; child of ord_notification via request_id."""
    __tablename__ = 'target_resource'
    # Primary key generated client-side as a UUID4 string per row.
    template_status_id = Column(String(50),
                                primary_key=True,
                                default=lambda: str(uuid.uuid4()))
    request_id = Column(String(50),
                        ForeignKey('ord_notification.request_id'),
                        nullable=False)
    resource_template_version = Column(String(50), nullable=False)
    resource_name = Column(String(80), nullable=False)
    resource_type = Column(String(50))
    status = Column(String(32))
    error_code = Column(String(32))
    error_msg = Column(String(255))

0
ord/engine/__init__.py Normal file
View File

41
ord/engine/app.py Normal file
View File

@ -0,0 +1,41 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ord.engine.engine import Engine
from ord.engine.engine import QueueHandler
from oslo_config import cfg
import oslo_messaging as messaging
def start():
    """Create the engine and block serving RPC on the 'ord-notifier-q' topic."""
    notifier_engine = Engine()
    # Listen for notify messages coming from the API service.
    rpc_transport = messaging.get_transport(cfg.CONF)
    rpc_target = messaging.Target(topic='ord-notifier-q',
                                  server=cfg.CONF.host)
    rpc_server = messaging.get_rpc_server(rpc_transport,
                                          rpc_target,
                                          [QueueHandler(notifier_engine)],
                                          executor='blocking')
    try:
        rpc_server.start()
        rpc_server.wait()
    except KeyboardInterrupt:
        # Add termination handling here
        pass

95
ord/engine/engine.py Normal file
View File

@ -0,0 +1,95 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from multiprocessing import Process
import os
from oslo_config import cfg
from ord.client import rpcengine
from ord.engine.workerfactory import WorkerFactory
from ord.openstack.common import log as logging
# Engine-level options registered on the default group.
# NOTE(review): 'local_repo' is also registered by ord.engine.workerfactory
# with the same definition -- confirm the duplicate registration is benign.
OPTS = [
    cfg.StrOpt('local_repo',
               default='aic-orm-resources-labs',
               help='local repo from where the'
                    'template yaml can be accessed from'),
    cfg.StrOpt('region',
               default='local',
               help='Region'),
]
cfg.CONF.register_opts(OPTS)
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class QueueHandler(object):
    """RPC endpoint that turns API notifier messages into worker threads."""

    def __init__(self, engine):
        super(QueueHandler, self).__init__()
        self._engine = engine
        self._rpcengine = rpcengine.RpcEngine()
        self.factory = WorkerFactory()

    def invoke_notifier_rpc(self, ctxt, payload):
        """Handle one notifier message: parse it and dispatch a worker.

        :param ctxt: RPC request context (only logged here)
        :param payload: string form of a dict describing the template job
        """
        import ast

        LOG.debug("\n----- message from API -----")
        LOG.debug("\n Payload: %s \nctxt: %s "
                  % (str(payload), str(ctxt)))
        LOG.debug("\n-------------------------------\n")
        # Security fix: ast.literal_eval() accepts the same literal dict
        # payloads that eval() did, but cannot execute arbitrary expressions
        # smuggled in over the message bus.
        d = ast.literal_eval(payload)
        template_type = d["template_type"]
        resource_name = d["resource_name"]
        resource_type = d["resource_type"]
        operation = d["resource_operation"]
        template_status_id = d["template_status_id"]
        region = d["region"]
        # Stack name is the resource file name up to its first dot.
        stack_name = resource_name[:resource_name.index(".")]
        path_to_template = os.path.join(region, template_type,
                                        resource_type, resource_name)
        worker = self.factory.getWorker(operation, path_to_template,
                                        stack_name, template_status_id,
                                        resource_type, template_type)
        self.factory.execute(worker, operation)
class Engine(object):
    """Entry point for interacting with the basic ORD clients."""

    def __init__(self):
        """Initialize an engine instance."""
        super(Engine, self).__init__()
        # FIXME self.factory = WorkerFactory()

    def _execute(self):
        """Body of the child process: wait for work."""
        LOG.info("Waiting for a message...")

    def start(self):
        """Run _execute in a child process and wait for it to finish."""
        worker_process = Process(target=self._execute)
        try:
            LOG.info("Starting the engine... (Press CTRL+C to quit)")
            worker_process.start()
            worker_process.join()
        except KeyboardInterrupt:
            worker_process.terminate()

486
ord/engine/workerfactory.py Executable file
View File

@ -0,0 +1,486 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import json
from oslo_config import cfg
import random
import six
import sys
import threading
import time
from ord.client import getrepo
from ord.client import heat
from ord.client import rpcengine
from ord.common import exceptions as exc
from ord.common import utils
from ord.db.sqlalchemy import api as db_api
from ord.openstack.common import log as logging
CONF = cfg.CONF
# Worker tuning knobs, registered on the default group.
CONF.register_opts([
    cfg.StrOpt('local_repo', default='aic-orm-resources-labs',
               help='local repo from where the'
                    'template yaml can be accessed from'),
    cfg.IntOpt('heat_poll_interval', default=5,
               help='delay in seconds between two consecutive call to '
                    'heat.stacks.status'),
    cfg.IntOpt('resource_status_check_wait', default=10,
               help='delay in seconds between two retry call to '
                    'rds listener repo'),
    # NOTE(review): declared as a plain cfg.Opt with a *string* default,
    # yet read elsewhere in this module as int(CONF.orm.retry_limits) from
    # the [orm] group while being registered here on the default group --
    # confirm the intended group and consider cfg.IntOpt.
    cfg.Opt('retry_limits',
            default='3',
            help='number of retry'),
    cfg.IntOpt('resource_creation_timeout_min', default=1200,
               help='max wait time for flavor and customer stacks'),
    cfg.IntOpt('resource_creation_timeout_max', default=14400,
               help='max wait time for image stacks')
])
LOG = logging.getLogger(__name__)
class Singleton(type):
    """Metaclass that caches exactly one instance per class."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        # The first construction is cached; later calls return that object.
        cached = cls._instances.get(cls)
        if cached is None:
            cached = super(Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = cached
        return cached
@six.add_metaclass(Singleton)
class WorkerFactory(object):
    """Singleton that initializes shared clients and spawns WorkerThreads.

    NOTE(review): the shared state (_temp_repo_client, _heat_client,
    _rpcengine, _threadPool, _init_error) is stored as attributes on the
    WorkerThread class, not on WorkerFactory, even though mirror attributes
    are declared below -- confirm this asymmetry is intended.
    """
    _instance = None
    _temp_repo_client = None
    _heat_client = None
    _glance_client = None
    _db_client = None
    _rpcengine = None
    _client_initialize = False
    _threadPool = {}
    _init_error = None

    @staticmethod
    def _client_init():
        """Create repo/heat/RPC clients once; record the last error code seen."""
        LOG.info("Initializing all clients :: %s",
                 str(WorkerFactory._client_initialize))
        WorkerThread._init_error = None
        try:
            try:
                WorkerThread._temp_repo_client = \
                    getrepo.TemplateRepoClient(CONF.local_repo)
            except exc.RepoInitializationException as repoexp:
                LOG.critical("Failed to initialize Repo %s " % repoexp)
                WorkerThread._init_error = utils.ErrorCode.ORD_018.value
            try:
                WorkerThread._heat_client = heat.HeatClient()
            except exc.KeystoneInitializationException as kcexp:
                LOG.critical("Failed to initialize Keystone %s " % kcexp)
                WorkerThread._init_error = utils.ErrorCode.ORD_016.value
            try:
                WorkerThread._rpcengine = rpcengine.RpcEngine()
            except exc.RPCInitializationException as rpcexp:
                LOG.critical("Failed to initialize RPC %s " % rpcexp)
                WorkerThread._init_error = utils.ErrorCode.ORD_019.value
        except Exception as exception:
            # Any error not raised as one of the typed exceptions above.
            WorkerThread._init_error = utils.ErrorCode.ORD_017.value
            LOG.critical(
                "Unexpected error while initializing clients %s" % exception)
        finally:
            WorkerThread._threadPool = {}
            if WorkerThread._init_error is None:
                WorkerThread._client_initialize = True

    @classmethod
    def removeWorker(cls, idnr):
        """Drop thread `idnr` from the pool; raise if it is unknown."""
        LOG.info("Deleting thread : " + str(idnr))
        try:
            del WorkerThread._threadPool[idnr]
        except KeyError:
            LOG.info("Thread was not found for deletion")
            raise exc.WorkerThreadError(thread_id=idnr)
        LOG.info("Thread was deleted : " + str(idnr))

    def __init__(self):
        # Runs only once thanks to the Singleton metaclass.
        LOG.info("initializing WorkerFactory._init_")
        # NOTE(review): this guard checks WorkerFactory._client_initialize,
        # but _client_init() sets WorkerThread._client_initialize, so the
        # flag on WorkerFactory never flips -- confirm which class is meant.
        if WorkerFactory._client_initialize is False:
            WorkerFactory._client_init()
        WorkerThread._client_initialize = True

    def getWorker(self, operation, path_to_tempate, stack_name,
                  template_status_id, resource_type,
                  template_type):
        """Create and pool a WorkerThread for a 'hot' template; return its id.

        Returns -1 for 'ansible' templates (not implemented) and raises
        UnsupportedTemplateTypeError for any other template type.
        """
        template_type = template_type.lower()
        # FIXME(db2242): this code have a none zero to fail in very unexpected
        # way
        threadID = random.randint(0, 99999999)
        if template_type == "hot":
            miniWorker = WorkerThread(threadID, operation,
                                      path_to_tempate, stack_name,
                                      template_status_id, resource_type,
                                      WorkerThread._init_error)
            WorkerThread._threadPool.update({threadID: miniWorker})
        elif template_type == "ansible":
            threadID = -1
        else:
            # FIXME(db2242): too late for such check
            raise exc.UnsupportedTemplateTypeError(template=template_type)
        return threadID

    def execute(self, idnr, operation):
        """Start the pooled WorkerThread registered under `idnr`."""
        try:
            worker = WorkerThread._threadPool[idnr]
        except KeyError:
            raise exc.WorkerThreadError(thread_id=idnr)
        worker.start()
class WorkerThread(threading.Thread):
    def __init__(self, threadID, operation, path_to_tempate, stack_name,
                 template_status_id, resource_type, client_error=None):
        """Capture one template operation for asynchronous execution.

        :param threadID: pool key assigned by WorkerFactory
        :param operation: create/modify/delete operation name
        :param path_to_tempate: repo-relative path to the template file
        :param stack_name: heat stack name (resource file name sans suffix)
        :param template_status_id: DB key of the target_resource row
        :param resource_type: e.g. 'image'; drives extra metadata handling
        :param client_error: error code recorded during client init, if any
        """
        LOG.info("initializing Thread._init_")
        threading.Thread.__init__(self)
        self.threadID = threadID
        self.operation = operation
        self.template_path = path_to_tempate
        self.stack_name = stack_name
        self.template_status_id = template_status_id
        self.resource_type = resource_type
        self.client_error = client_error
    def extract_resource_extra_metadata(self, rds_payload, rds_status):
        """For successful image stacks, attach checksum/size data in place.

        Mutates rds_payload['rds-listener'] only when this worker handles
        an 'image' resource and the operation succeeded; all other resource
        types and statuses leave the payload untouched.
        """
        if self.resource_type.lower() == 'image' \
                and rds_status == utils.STATUS_SUCCESS:
            stack = self._heat_client.get_stack_by_name(self.stack_name)
            image_data = self._heat_client.get_image_data_by_stackid(stack.id)
            if image_data:
                rds_payload.get('rds-listener').update(
                    {'resource_extra_metadata':
                        {'checksum': image_data['checksum'],
                         'size': str(image_data['size']),
                         'virtual_size':
                             str(image_data['virtual_size'])}})
def _prepare_rds_payload(self):
target_data = db_api.retrieve_target_by_status(self.template_status_id)
notify_data = {}
if 'request_id' in target_data:
notify_data = db_api.retrieve_template(
target_data.get('request_id'))
payload = utils.create_rds_payload(notify_data, target_data)
return payload
    def run(self):
        """Thread entry point: run the operation, persist the outcome,
        notify RDS, and always deregister from the worker pool.
        """
        LOG.debug("Thread Starting :: %s", self.threadID)
        LOG.debug("operation=%s, stack_name=%s, path_to_tempate=%s",
                  self.operation, self.stack_name, self.template_path)
        try:
            if self._is_engine_initialized():
                LOG.debug('Client initialization complete')
                try:
                    self._execute_operation()
                except exc.ORDException as e:
                    LOG.error('%s', e.message)
                    self._update_permanent_storage(e)
                except Exception as e:
                    LOG.critical('Unhandled exception into %s',
                                 type(self).__name__, exc_info=True)
                    self._update_permanent_storage(e)
                else:
                    # No exception: record success for the target row.
                    self._update_permanent_storage()
            # Always report back to RDS, even when client init failed
            # (_is_engine_initialized already recorded the error row).
            try:
                self._send_operation_results()
            except Exception:
                LOG.critical('ORD_019 - INCOMPLETE OPERATION! Error during '
                             'sending operation results. Called will never '
                             'know about issue.')
                raise
        except Exception:
            LOG.critical('Unhandled exception into %s', type(self).__name__,
                         exc_info=True)
        finally:
            LOG.info("Thread Exiting :: %s", self.threadID)
            WorkerFactory.removeWorker(self.threadID)
def _is_engine_initialized(self):
args = {}
if self.client_error is not None:
args['error_code'] = self.client_error
args['error_msg'] = utils.ErrorCode.tostring(self.client_error)
LOG.debug('Updating DB with %s code with %s '
% (args['error_code'], args['error_msg']))
db_api.update_target_data(
self.template_status_id, utils.STATUS_ERROR, **args)
return False
return True
    def _execute_operation(self):
        """Run the requested heat operation and wait for it to settle.

        On a failed CREATE the freshly created stack is deleted again
        (rollback) and the original failure is re-raised wrapped in
        StackRollbackError carrying the rollback outcome; failures of
        MODIFY/DELETE propagate as-is.
        """
        template = self._fetch_template()
        if self.operation == utils.OPERATION_CREATE:
            stack = self._create_stack(template)
        elif self.operation == utils.OPERATION_MODIFY:
            stack = self._update_stack(template)
        elif self.operation == utils.OPERATION_DELETE:
            stack = self._delete_stack()
        else:
            raise exc.UnsupportedOperationError(operation=self.operation)
        try:
            self._wait_for_heat(stack, self.operation)
        except exc.StackOperationError:
            # sys.exc_info() tuple form keeps the active exception handy.
            _, e, _tb = sys.exc_info()
            if e.arguments['operation'] != utils.OPERATION_CREATE:
                raise
            args = {}
            try:
                # Roll back the partially created stack.
                self._delete_stack()
                self._wait_for_heat(
                    e.arguments['stack'], utils.OPERATION_DELETE)
            except exc.StackOperationError as e_rollback:
                args['rollback_error'] = e_rollback
                args['rollback_message'] = e_rollback.message
                args['rollback_status'] = False
            else:
                args['rollback_status'] = True
            raise exc.StackRollbackError(error=e, **args)
    def _update_permanent_storage(self, error=None):
        """Persist the final operation status for this target in the DB.

        :param error: exception raised by the operation, or None on success.
            StackOperationError maps to STATUS_ERROR (any rollback outcome
            is appended to the error message); other ORDException and
            unexpected exceptions map to STATUS_INTERNAL_ERROR.
        """
        args = {}
        if isinstance(error, exc.StackOperationError):
            status = utils.STATUS_ERROR
            args['error_msg'] = error.message
            args['error_code'] = error.error_code
            try:
                rollback = error.arguments['rollback_status']
            except KeyError:
                # No rollback was attempted for this failure.
                pass
            else:
                if rollback:
                    rollback_message = 'success'
                else:
                    rollback_message = error.arguments.get(
                        'rollback_message', 'fail')
                # Tag every line of the rollback report so it stays
                # distinguishable inside the combined error message.
                glue = '\n[ROLLBACK] '
                rollback_message = glue.join(rollback_message.split('\n'))
                args['error_msg'] = glue.join(
                    (args['error_msg'], rollback_message))
        elif isinstance(error, exc.ORDException):
            status = utils.STATUS_INTERNAL_ERROR
            args['error_msg'] = error.message
            args['error_code'] = error.error_code
        elif isinstance(error, Exception):
            # Unexpected exception type: record it with a generic code.
            status = utils.STATUS_INTERNAL_ERROR
            args['error_msg'] = str(error)
            args['error_code'] = exc.ERROR_UNKNOWN_EXCEPTION
        else:
            # error is None -- the operation completed successfully.
            args['error_code'] = exc.SUCCESS_CODE
            status = utils.STATUS_SUCCESS
        db_api.update_target_data(
            self.template_status_id, status, **args)
    def _send_operation_results(self):
        """Push the operation result to the RDS listener via RPC, with retry.

        Sends the payload once, then polls the DB status up to
        ``CONF.orm.retry_limits - 1`` additional times: whenever the
        listener recorded STATUS_RDS_ERROR the payload is re-sent (with its
        original status restored), and the loop stops as soon as
        STATUS_RDS_SUCCESS is observed.
        """
        rds_payload = self._prepare_rds_payload()
        res_ctxt = {'request-id': rds_payload.get('request-id')}
        LOG.debug("----- RPC API Payload to RDS %r", rds_payload)
        # Remember the status so it can be restored before each re-send.
        status_original = rds_payload.get('rds-listener')['status']
        try:
            self.extract_resource_extra_metadata(rds_payload, status_original)
        except Exception as exception:
            # Best effort only: failure to collect extra image metadata must
            # not abort delivery of the operation result.
            LOG.error("Unexpected error collecting extra \
                Image Parameter %s" % exception)
        max_range = int(CONF.orm.retry_limits)
        self._rpcengine. \
            invoke_listener_rpc(res_ctxt, json.dumps(rds_payload))
        while max_range - 1 > 0:
            LOG.debug('Waiting for invoke listener')
            time.sleep(CONF.resource_status_check_wait)
            target_data = db_api.retrieve_target_by_status(
                self.template_status_id)
            status = target_data.get('status')
            if status == utils.STATUS_RDS_ERROR:
                LOG.debug("Retrying for RDS listener response %s", max_range)
                rds_payload.get('rds-listener')['status'] = status_original
                # if image_payload:
                #     rds_payload.get('rds-listener')['status'] = image_payload
                self._rpcengine. \
                    invoke_listener_rpc(res_ctxt, json.dumps(rds_payload))
            if status != utils.STATUS_RDS_SUCCESS:
                LOG.debug("Retrying for api response")
                max_range = max_range - 1
            else:
                break
def _fetch_template(self):
"""Fetch template from document storage
Template fetching will be skipped if current operation does not require
template.
"""
if self.operation not in (
utils.OPERATION_CREATE,
utils.OPERATION_MODIFY):
return
LOG.debug("template path: %r", self.template_path)
return self._temp_repo_client.pull_template(
CONF.local_repo, self.template_path)
def _create_stack(self, template):
LOG.debug("Creating stack name %s by template %s",
self.stack_name, self.template_path)
# This call return raw response(dict), but all other calls to heat
# client return "models" build from raw responses. Look like this a
# BUG into heatclient. This behavior is not fixed until now (1.2.0).
stack = self._heat_client.create_stack(self.stack_name, template)
stack = stack['stack']
return self._heat_client.get_stack(stack['id'])
def _update_stack(self, template):
LOG.debug("Updating stack id %s by template %s",
self.stack_name, self.template_path)
stack = self._heat_client.get_stack_by_name(self.stack_name)
self._heat_client.update_stack(stack.id, template)
return stack
def _delete_stack(self):
LOG.info("Deleting stack %r", self.stack_name)
stack = self._heat_client.get_stack_by_name(self.stack_name)
self._heat_client.delete_stack(stack.id)
return stack
    def _wait_for_heat(self, stack, operation):
        """Poll HEAT until *operation* on *stack* leaves its transient state.

        Polls every ``CONF.heat_poll_interval`` seconds up to a
        resource-type dependent ceiling (images get the longer timeout).

        :raises exc.StackOperationError: HEAT finished with a FAILED status
        :raises exc.StackTimeoutError: still in progress when the timeout
            budget ran out
        """
        LOG.debug('Wait while HEAT do his job: stack=%s', self.stack_name)
        poll_interval = CONF.heat_poll_interval
        LOG.debug("HEAT poll interval: %s", poll_interval)
        max_wait_time = 0
        # Image resources can take much longer to materialize.
        if self.resource_type == 'image':
            max_wait_time = CONF.resource_creation_timeout_max
        else:
            max_wait_time = CONF.resource_creation_timeout_min
        LOG.debug("max_wait_time: %s", max_wait_time)
        # Status history is tracked purely for diagnostics in the logs.
        stack_status_transitions = StatusTransitions(stack.stack_status)
        start_time = time.time()
        waiting_time = 0
        status_check = HEATIntermediateStatusChecker(stack, operation)
        while status_check(stack) \
                and (waiting_time <= max_wait_time):
            time.sleep(poll_interval)
            waiting_time = time.time() - start_time
            LOG.debug('%s waiting %s for %s',
                      self.threadID, waiting_time,
                      stack.stack_name)
            LOG.debug('%s stack status transition: %s',
                      self.threadID, stack_status_transitions)
            # Refresh the stack model so the next check sees live status.
            stack = self._heat_client.get_stack(stack.id)
            stack_status_transitions.add(stack.stack_status)
        LOG.debug('%s done with waiting for stack %s: action=%s, status=%s',
                  self.threadID, stack.stack_name, status_check.action,
                  status_check.status)
        if status_check.is_fail:
            raise exc.StackOperationError(operation=operation, stack=stack)
        elif status_check.is_in_progress:
            # Loop exited on timeout while HEAT was still working.
            raise exc.StackTimeoutError(operation=operation, stack=stack)
class StatusTransitions(object):
    """Record the sequence of HEAT stack status values seen while polling.

    Consecutive repeats of the same status are collapsed into one entry
    with a hit counter, e.g. ``CREATE_IN_PROGRESS(3) ~> CREATE_COMPLETE``.
    """

    def __init__(self, status):
        # transitions[i] is the i-th distinct status observed; hits[i] is
        # how many consecutive polls reported it.
        self.transitions = [status]
        self.hits = [1]

    def add(self, status):
        """Record one more status observation."""
        if self.transitions[-1] != status:
            self.transitions.append(status)
            self.hits.append(0)
        self.hits[-1] += 1

    def __str__(self):
        # Use the builtin zip() (works on both Python 2 and 3) instead of
        # itertools.izip, which does not exist on Python 3 and would make
        # rendering the transition history crash there.
        chunks = []
        for status, hits in zip(self.transitions, self.hits):
            if 1 < hits:
                status = '{}({})'.format(status, hits)
            chunks.append(status)
        return ' ~> '.join(chunks)
class HEATIntermediateStatusChecker(object):
    """Decide whether a HEAT stack is still in a transient state.

    A HEAT status string has the form ``<ACTION>_<STATUS>`` (for example
    ``CREATE_IN_PROGRESS``); instances split it and compare the action
    against the one expected for the ORD operation being performed.
    """

    ACTION_CREATE = 'CREATE'
    ACTION_UPDATE = 'UPDATE'
    ACTION_DELETE = 'DELETE'
    STATUS_IN_PROGRESS = 'IN_PROGRESS'
    STATUS_COMPLETE = 'COMPLETE'
    STATUS_FAIL = 'FAILED'

    # ORD operation name -> HEAT action prefix expected in stack_status.
    _operation_to_heat_action_map = {
        utils.OPERATION_CREATE: ACTION_CREATE,
        utils.OPERATION_MODIFY: ACTION_UPDATE,
        utils.OPERATION_DELETE: ACTION_DELETE}

    def __init__(self, stack, operation):
        # Keep the pre-operation stack snapshot: for updates its
        # updated_time is the baseline to detect that HEAT has started.
        self.stack = stack
        self.expect_action = self._operation_to_heat_action_map[operation]
        self.action, self.status = self._extract_action_and_status(self.stack)

    def __call__(self, stack):
        """Return True while the operation has not reached a final state.

        True when the stack is still IN_PROGRESS, when HEAT has not yet
        switched to the expected action, or (for updates) while
        ``updated_time`` has not moved from the pre-update snapshot.
        """
        self.action, self.status = self._extract_action_and_status(stack)
        check = [
            self.status == self.STATUS_IN_PROGRESS,
            self.action != self.expect_action]
        if self.expect_action == self.ACTION_UPDATE:
            check.append(self.stack.updated_time == stack.updated_time)
        return any(check)

    @property
    def is_fail(self):
        # True when the last observed status was '<ACTION>_FAILED'.
        return self.status == self.STATUS_FAIL

    @property
    def is_in_progress(self):
        return self.status == self.STATUS_IN_PROGRESS

    @staticmethod
    def _extract_action_and_status(stack):
        """Split 'ACTION_STATUS' into its (action, status) parts.

        :raises exc.HEATIntegrationError: stack_status contains no
            underscore separator.
        """
        try:
            action, status = stack.stack_status.split('_', 1)
        except ValueError:
            raise exc.HEATIntegrationError(
                details='Invalid value in stack.stack_status: {!r}'.format(
                    stack.stack_status))
        return action, status

0
ord/hacking/__init__.py Normal file
View File

139
ord/hacking/checks.py Normal file
View File

@ -0,0 +1,139 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import pep8
# Guidelines for writing new hacking checks
#
# - Use only for Neutron specific tests. OpenStack general tests
# should be submitted to the common 'hacking' module.
# - Pick numbers in the range N3xx. Find the current test with
# the highest allocated number and then pick the next value.
# - Keep the test method code in the source file ordered based
# on the N3xx value.
# - List the new rule in the top level HACKING.rst file
# - Add test cases for each new rule to
# neutron/tests/unit/hacking/test_checks.py
# Map each LOG method name to the translation-hint function expected to
# wrap its message (e.g. LOG.error messages must be wrapped in _LE()).
_all_log_levels = {
    'error': '_LE',
    'info': '_LI',
    'warn': '_LW',
    'warning': '_LW',
    'critical': '_LC',
    'exception': '_LE',
}
# The set of all known hint names: {'_LE', '_LI', '_LW', '_LC'}.
_all_hints = set(_all_log_levels.values())
def _regex_for_level(level, hint):
    # Build a regex matching LOG.<level>() calls whose message is either a
    # bare string literal or wrapped in the *wrong* translation marker for
    # that level (any hint from _all_hints except the expected one).
    return r".*LOG\.%(level)s\(\s*((%(wrong_hints)s)\(|'|\")" % {
        'level': level,
        'wrong_hints': '|'.join(_all_hints - set([hint])),
    }
# Matches any LOG.<level>() call that lacks the proper translation hint.
log_translation_hint = re.compile(
    '|'.join('(?:%s)' % _regex_for_level(level, hint)
             # items() works on both Python 2 and 3; iteritems() was
             # removed in Python 3 and would crash this module on import.
             for level, hint in _all_log_levels.items()))

# Old-style oslo namespace-package import patterns (N323 check).
oslo_namespace_imports_dot = re.compile(r"import[\s]+oslo[.][^\s]+")
oslo_namespace_imports_from_dot = re.compile(r"from[\s]+oslo[.]")
oslo_namespace_imports_from_root = re.compile(r"from[\s]+oslo[\s]+import[\s]+")
def validate_log_translations(logical_line, physical_line, filename):
    """N320: non-debug log messages must carry a translation hint."""
    # The test tree is exempt from translation requirements.
    if "ord/tests" in filename:
        return
    # Honor inline "# noqa" suppressions.
    if pep8.noqa(physical_line):
        return
    if log_translation_hint.match(logical_line):
        yield (0, "N320: Log messages require translation hints!")
def use_jsonutils(logical_line, filename):
    """N321: flag direct json.<fun>() calls; jsonutils must be used."""
    msg = "N321: jsonutils.%(fun)s must be used instead of json.%(fun)s"
    if "json." not in logical_line:
        return
    for fun in ('dumps', 'dump', 'loads', 'load'):
        pos = logical_line.find('json.%s(' % fun)
        if pos != -1:
            yield (pos, msg % {'fun': fun})
def no_translate_debug_logs(logical_line, filename):
    """N319: debug-level log messages must not be translated.

    Per https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
    debug logs are never translated, so ``LOG.debug(_(`` and every
    ``LOG.debug(_Lx(`` variant is flagged.  Assumes 'LOG' is a logger.
    """
    prefixes = tuple("LOG.debug(%s(" % hint for hint in _all_hints)
    for prefix in prefixes:
        if logical_line.startswith(prefix):
            yield (0, "N319 Don't translate debug level logs")
def check_assert_called_once_with(logical_line, filename):
    """N322: detect calls of nonexistent mock assertion methods.

    Mock silently accepts misspelled assertion names such as
    ``assert_called_once``, ``assertCalledOnceWith`` or
    ``assert_has_called`` -- they are no-ops, not checks.
    """
    # Only test code uses mock assertions.
    if 'ord/tests/' not in filename:
        return
    # The one legitimate spelling passes without further inspection.
    if '.assert_called_once_with(' in logical_line:
        return
    normalized = logical_line.lower().replace('_', '')
    if '.assertcalledonce' in normalized:
        yield (0, "N322: Possible use of no-op mock method. "
                  "please use assert_called_once_with.")
    if '.asserthascalled' in normalized:
        yield (0, "N322: Possible use of no-op mock method. "
                  "please use assert_has_calls.")
def check_oslo_namespace_imports(logical_line):
    """N323: old oslo namespace packages must not be imported.

    Suggests the oslo_* replacement for each of the three legacy forms.
    """
    # 'from oslo.foo import bar'  ->  'from oslo_foo import bar'
    if re.match(oslo_namespace_imports_from_dot, logical_line):
        msg = ("N323: '%s' must be used instead of '%s'.") % (
            logical_line.replace('oslo.', 'oslo_'), logical_line)
        yield(0, msg)
    # 'from oslo import foo'  ->  'import oslo_foo'
    elif re.match(oslo_namespace_imports_from_root, logical_line):
        msg = ("N323: '%s' must be used instead of '%s'.") % (
            logical_line.replace('from oslo import ', 'import oslo_'),
            logical_line)
        yield(0, msg)
    # 'import oslo.foo'  ->  'from oslo import foo' (legacy rewrite)
    elif re.match(oslo_namespace_imports_dot, logical_line):
        msg = ("N323: '%s' must be used instead of '%s'.") % (
            logical_line.replace('import', 'from').replace('.', ' import '),
            logical_line)
        yield(0, msg)
def factory(register):
    """Hacking entry point: register every local style check, in order."""
    for check in (validate_log_translations,
                  use_jsonutils,
                  check_assert_called_once_with,
                  no_translate_debug_logs,
                  check_oslo_namespace_imports):
        register(check)

46
ord/i18n.py Normal file
View File

@ -0,0 +1,46 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo_i18n
DOMAIN = 'ord'
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
def translate(value, user_locale):
    """Translate *value* into *user_locale* via oslo.i18n."""
    return oslo_i18n.translate(value, user_locale)
def get_available_languages():
    """List the locales with translations available for the 'ord' domain."""
    return oslo_i18n.get_available_languages(DOMAIN)

View File

View File

View File

@ -0,0 +1,45 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
# Set up the translation shortcuts, degrading to no-ops when oslo.i18n
# is not installed.
try:
    import oslo.i18n
    # NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
    # application name when this module is synced into the separate
    # repository. It is OK to have more than one translation function
    # using the same domain, since there will still only be one message
    # catalog.
    _translators = oslo.i18n.TranslatorFactory(domain='ord')
    # The primary translation function using the well-known name "_"
    _ = _translators.primary
    # Translators for log levels.
    #
    # The abbreviated names are meant to reflect the usual use of a short
    # name like '_'. The "L" is for "log" and the other letter comes from
    # the level.
    _LI = _translators.log_info
    _LW = _translators.log_warning
    _LE = _translators.log_error
    _LC = _translators.log_critical
except ImportError:
    # NOTE(dims): Support for cases where a project wants to use
    # code from oslo-incubator, but is not ready to be internationalized
    # (like tempest)
    _ = _LI = _LW = _LE = _LC = lambda x: x

View File

@ -0,0 +1,45 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Local storage of variables using weak references"""
import threading
import weakref
class WeakLocal(threading.local):
    """Thread-local storage whose values are held via weak references.

    Attribute writes transparently wrap the value in a ``weakref.ref``;
    attribute reads dereference it, returning the original object, or
    ``None`` once that object has been garbage collected.
    """

    def __getattribute__(self, attr):
        ref = super(WeakLocal, self).__getattribute__(attr)
        if ref:
            # What is stored is the weak reference itself, so it must be
            # called to recover (or lose) the referenced value.
            ref = ref()
        return ref

    def __setattr__(self, attr, value):
        return super(WeakLocal, self).__setattr__(attr, weakref.ref(value))
# NOTE(mikal): the name "store" should be deprecated in the future
store = WeakLocal()
# A "weak" store uses weak references and allows an object to fall out of scope
# when it falls out of scope in the code that uses the thread local storage. A
# "strong" store will hold a reference to the object so that it never falls out
# of scope.
weak_store = WeakLocal()
strong_store = threading.local()

720
ord/openstack/common/log.py Normal file
View File

@ -0,0 +1,720 @@
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import copy
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import socket
import sys
import traceback
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import importutils
import six
from six import moves
_PY26 = sys.version_info[0:2] == (2, 6)
from ord.openstack.common._i18n import _
from ord.openstack.common import local
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config-append',
metavar='PATH',
deprecated_name='log-config',
help='The name of a logging configuration file. This file '
'is appended to any existing logging configuration '
'files. For details about logging configuration files, '
'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'This option is deprecated. Please use '
'logging_context_format_string and '
'logging_default_format_string instead.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s .'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
'--log-file paths.'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
default=False,
help='(Optional) Enables or disables syslog rfc5424 format '
'for logging. If enabled, prefixes the MSG part of the '
'syslog message with APP-NAME (RFC5424). The '
'format without the APP-NAME is deprecated in I, '
'and will be removed in J.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='Syslog facility to receive log lines.')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error.')
]
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
'oslo.messaging=INFO', 'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN',
'urllib3.connectionpool=WARN', 'websocket=WARN',
"keystonemiddleware=WARN", "routes.middleware=WARN",
"stevedore=WARN"]
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user_identity)s] '
'%(instance)s%(message)s',
help='Format string to use for log messages with context.'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='Format string to use for log messages without context.'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='Data to append to log format when level is DEBUG.'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
default=DEFAULT_LOG_LEVELS,
help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
help='Enables or disables publication of error events.'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='Enables or disables fatal status of deprecations.'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='The format for an instance that is passed with the log '
'message.'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='The format for an instance UUID that is passed with the '
'log message.'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
def list_opts():
    """Entry point for oslo.config-generator."""
    # Deep copies keep the generator from mutating the registered options.
    return [(None, copy.deepcopy(common_cli_opts)),
            (None, copy.deepcopy(logging_cli_opts)),
            (None, copy.deepcopy(generic_log_opts)),
            (None, copy.deepcopy(log_opts)),
            ]
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    # Minimal Python 2.6 fallback: swallow every record and use no lock.
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
    # Basename of the outermost script file on the interpreter call stack.
    return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Compute the log file destination, or None to log to stdout/stderr."""
    # Derives the file name from the running script (e.g. 'ord-api.log')
    # rather than from a CONF.log_file option as upstream oslo does.
    logfile = sys.argv[0].split('/')[-1]+'.log'
    logdir = CONF.log_dir
    if logfile and not logdir:
        return logfile
    if logfile and logdir:
        if not os.path.exists(logdir):
            os.makedirs(logdir)
        return os.path.join(logdir, logfile)
    # NOTE(review): the branches below look unreachable in practice --
    # 'logfile' above is always non-empty -- apparently kept for parity
    # with the upstream oslo-incubator implementation.
    if logdir:
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)
    return None
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter with the synthesized AUDIT level and py2.6 support."""

    def audit(self, msg, *args, **kwargs):
        # AUDIT is defined above as logging.INFO + 1.
        self.log(logging.AUDIT, msg, *args, **kwargs)

    def isEnabledFor(self, level):
        if _PY26:
            # This method was added in python 2.7 (and it does the exact
            # same logic, so we need to do the exact same logic so that
            # python 2.6 has this capability as well).
            return self.logger.isEnabledFor(level)
        else:
            return super(BaseLoggerAdapter, self).isEnabledFor(level)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creating the real logger until first use."""

    def __init__(self, name='unknown', version='unknown'):
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version

    @property
    def logger(self):
        # Create and memoize the real adapter on first access.
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
            if six.PY3:
                # In Python 3, the code fails because the 'manager' attribute
                # cannot be found when using a LoggerAdapter as the
                # underlying logger. Work around this issue.
                self._logger.manager = self._logger.logger.manager
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """Adapter that injects request-context fields into every log record."""

    # Alias so callers may use .warn() interchangeably with .warning().
    warn = logging.LoggerAdapter.warning

    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
        # msg -> list of args tuples already emitted by deprecated().
        self._deprecated_messages_sent = dict()

    @property
    def handlers(self):
        return self.logger.handlers

    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.

        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.

        Otherwise, the message will be logged (once) at the 'warn' level.

        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.
        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())
        if args in sent_args:
            # Already logged this message, so don't log it again.
            return
        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)

    def process(self, msg, kwargs):
        """Populate kwargs['extra'] with context/instance/project fields."""
        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        #                before it can get to the python logging and
        #                possibly cause string encoding trouble
        if not isinstance(msg, six.text_type):
            msg = six.text_type(msg)
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']
        # Explicit context kwarg wins; otherwise fall back to the
        # thread-local store.
        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))
        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra
        extra.setdefault('user_identity', kwargs.pop('user_identity', None))
        extra['project'] = self.project
        extra['version'] = self.version
        # Nest a snapshot copy so formatters can access the raw mapping.
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that serializes the entire LogRecord as a JSON object."""

    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        """Render exc_info as a list of traceback lines."""
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            # Flatten the multi-line chunks into one entry per line,
            # dropping empty lines along the way.
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        """Serialize every LogRecord attribute into a JSON document."""
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}
        # 'extra' is attached by ContextAdapter.process().
        if hasattr(record, 'extra'):
            message['extra'] = record.extra
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook that logs uncaught exceptions as CRITICAL."""
    def logging_excepthook(exc_type, value, tb):
        # Log the one-line summary as the message; ship the full
        # exc_info so formatters can render the traceback.
        summary = "".join(traceback.format_exception_only(exc_type, value))
        getLogger(product_name).critical(
            summary, **{'exc_info': (exc_type, value, tb)})
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config_append):
    """Load a logging config file, wrapping parse errors in LogConfigError.

    :raises LogConfigError: when the file cannot be parsed.
    """
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
    """Setup logging."""
    # A logging config file, when given, wins over individual CONF options.
    if CONF.log_config_append:
        _load_log_config(CONF.log_config_append)
    else:
        _setup_logging_from_conf(product_name, version)
    # Route uncaught exceptions through the product logger.
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Override the registered defaults of the logging options."""
    # Just in case the caller is not setting the
    # default_log_level. This is insurance because
    # we introduced the default_log_level parameter
    # later in a backwards in-compatible change
    if default_log_levels is not None:
        cfg.set_defaults(
            log_opts,
            default_log_levels=default_log_levels)
    if logging_context_format_string is not None:
        cfg.set_defaults(
            log_opts,
            logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
    """Resolve CONF.syslog_log_facility to a SysLogHandler facility code.

    Accepts either an attribute name on SysLogHandler (e.g. 'LOG_USER')
    or a key of SysLogHandler.facility_names (e.g. 'user').

    :raises TypeError: if the configured facility name is unknown.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)

    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)

    if facility is None:
        # list() instead of .keys() so the extend() below also works on
        # Python 3, where dict.keys() returns a view without extend().
        valid_facilities = list(facility_names)
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))

    return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
    """SysLogHandler that prefixes messages with APP-NAME per RFC 5424."""

    def __init__(self, *args, **kwargs):
        self.binary_name = _get_binary_name()
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        # is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)

    def format(self, record):
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        # is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        msg = logging.handlers.SysLogHandler.format(self, record)
        # Prepend the APP-NAME so syslog consumers can identify the source.
        msg = self.binary_name + ' ' + msg
        return msg
def _setup_logging_from_conf(project, version):
    """Configure root handlers, formatters and levels from CONF options."""
    log_root = getLogger(None).logger
    # Start from a clean slate: drop any previously installed handlers.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)
    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)
    if CONF.use_stderr:
        # NOTE(review): ColorHandler is defined elsewhere in this module
        # (not visible in this chunk).
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)
    elif not logpath:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)
    if CONF.publish_errors:
        handler = importutils.import_object(
            "oslo.messaging.notify.log_handler.PublishErrorsHandler",
            logging.ERROR)
        log_root.addHandler(handler)
    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))
    # Root level: debug wins over verbose; default is WARNING.
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
    # Apply per-module 'logger=LEVEL' overrides.
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string
        #             name to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)
    if CONF.use_syslog:
        try:
            facility = _find_facility_from_conf()
            # TODO(bogdando) use the format provided by RFCSysLogHandler
            #                after existing syslog format deprecation in J
            if CONF.use_syslog_rfc_format:
                syslog = RFCSysLogHandler(address='/dev/log',
                                          facility=facility)
            else:
                syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                        facility=facility)
            log_root.addHandler(syslog)
        except socket.error:
            log_root.error('Unable to add syslog handler. Verify that syslog '
                           'is running.')
# Cache of ContextAdapter instances, one per logger name.
_loggers = {}


def getLogger(name='unknown', version='unknown'):
    """Return the cached ContextAdapter for *name*, creating on demand."""
    adapter = _loggers.get(name)
    if adapter is None:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
    return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Return a lazy pass-through logger.

    The returned LazyAdapter defers creation of the real logger until it
    is first needed, then delegates every call to it.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """File-like adapter that turns ``write`` calls into log records.

    Useful for pointing stream-oriented consumers (e.g. a WSGI server's
    output) at the logging subsystem.
    """

    def __init__(self, logger, level=logging.INFO):
        # Destination logger and the severity applied to every message.
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Trailing whitespace/newlines are stripped before logging.
        self.logger.log(self.level, msg.rstrip())
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level
    is debug.

    For information about what variables are available for the formatter
    see: http://docs.python.org/library/logging.html#formatter

    If available, uses the context value stored in TLS - local.store.context
    """

    def __init__(self, *args, **kwargs):
        """Initialize ContextFormatter instance

        Takes additional keyword arguments which can be used in the message
        format string.

        :keyword project: project name
        :type project: string
        :keyword version: project version
        :type version: string
        """
        # Project identity is attached to every record so format strings
        # may reference %(project)s / %(version)s.
        self.project = kwargs.pop('project', 'unknown')
        self.version = kwargs.pop('version', 'unknown')
        logging.Formatter.__init__(self, *args, **kwargs)

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
        # before it can get to the python logging and
        # possibly cause string encoding trouble
        if not isinstance(record.msg, six.text_type):
            record.msg = six.text_type(record.msg)
        # store project info
        record.project = self.project
        record.version = self.version
        # store request info from the thread-local context, if one was set
        context = getattr(local.store, 'context', None)
        if context:
            d = _dictify_context(context)
            for k, v in d.items():
                setattr(record, k, v)
        # NOTE(sdague): default the fancier formatting params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color', 'user_identity'):
            if key not in record.__dict__:
                record.__dict__[key] = ''
        # A request id selects the context-aware format string.
        if record.__dict__.get('request_id'):
            fmt = CONF.logging_context_format_string
        else:
            fmt = CONF.logging_default_format_string
        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            fmt += " " + CONF.logging_debug_format_suffix
        # Python >= 3.2 keeps the format inside a PercentStyle object;
        # older versions store it directly on self._fmt.
        if sys.version_info < (3, 2):
            self._fmt = fmt
        else:
            self._style = logging.PercentStyle(fmt)
            self._fmt = self._style._fmt
        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        # Without a record we cannot interpolate the prefix; fall back to
        # the stock formatter behaviour.
        if not record:
            return logging.Formatter.formatException(self, exc_info)
        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()
        # Only compute asctime when the configured prefix references it.
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)
        formatted_lines = []
        for line in lines:
            # Prefix every traceback line so multi-line exceptions remain
            # attributable in interleaved logs.
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """Stream handler that exposes an ANSI color escape per log level.

    The chosen escape sequence is placed on the record as ``color`` so
    format strings may reference %(color)s.
    """

    # NOTE(review): logging.AUDIT is a custom level registered elsewhere
    # in this module; it is not part of the stdlib logging package.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        # KeyError for levels missing from LEVEL_COLORS; all levels this
        # module emits are mapped above.
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised on a fatal use of a deprecated configuration option."""

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Bug fix: the original called super(Exception, self).__init__,
        # which starts the MRO lookup *above* Exception (at BaseException).
        # Start from this class so initialization follows the normal
        # exception chain.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))

66
ord/service.py Normal file
View File

@ -0,0 +1,66 @@
#!/usr/bin/env python
#
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import sys
from oslo_config import cfg
import oslo_i18n as i18n
from ord.common import utils
from ord.i18n import _
from ord.openstack.common import log
# Service-wide options: the advertised node name, used as an AMQP
# addressing key (must be a resolvable host name for ZeroMQ).
OPTS = [
    cfg.StrOpt('host',
               default=socket.gethostname(),
               help='Name of this node, which must be valid in an AMQP '
                    'key. Can be an opaque identifier. For ZeroMQ only, must '
                    'be a valid host name, FQDN, or IP address.'),
]
cfg.CONF.register_opts(OPTS)
# Module-level logger for this service module.
LOG = log.getLogger(__name__)
class WorkerException(Exception):
    """Raised for errors relating to service workers (e.g. bad counts)."""
def get_workers(name):
    """Resolve the worker count for the service ``name``.

    Reads the ``<name>_workers`` option and falls back to the host CPU
    count when the option is unset.

    :raises WorkerException: when the resolved count is below 1
    """
    option = '%s_workers' % name
    workers = cfg.CONF.get(option) or utils.cpu_count()
    if workers and workers < 1:
        raise WorkerException(
            _("%(worker_name)s value of %(workers)s is invalid, "
              "must be greater than 0") %
            {'worker_name': option, 'workers': str(workers)})
    return workers
def prepare_service(argv=None):
    """Bootstrap i18n, configuration, and logging for an ord process.

    :param argv: full command line (defaults to sys.argv); argv[0] is
                 discarded before option parsing.
    """
    i18n.enable_lazy()
    # Quiet stevedore on top of the stock default log levels.
    default_levels = cfg.CONF.default_log_levels + ['stevedore=INFO']
    cfg.set_defaults(log.log_opts, default_log_levels=default_levels)
    args = sys.argv if argv is None else argv
    cfg.CONF(args[1:], project='ord', validate_default_values=True)
    log.setup('ord')
    # messaging.setup()

0
ord/tests/__init__.py Normal file
View File

61
ord/tests/base.py Normal file
View File

@ -0,0 +1,61 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import tempfile
import mock
import testtools
class BaseTestCase(testtools.TestCase):
    """Common base for ord unit tests.

    Adds mock.patch bookkeeping (self.patch) and temp-file helpers on top
    of testtools.TestCase.
    """

    def __init__(self, *args, **kwargs):
        super(BaseTestCase, self).__init__(*args, **kwargs)
        # Active mock.patch objects for this test instance.  The original
        # code also declared a mutable class-level ``_patches = []``; it
        # was always shadowed by this instance attribute and only risked
        # accidental cross-instance sharing, so it has been removed.
        self._patches = []

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # Clear in place so any alias taken on the list stays valid.
        self._patches[:] = []

    def patch(self, *args, **kwargs):
        """Start a mock.patch, register auto-stop, and return the mock."""
        self._patches.append(mock.patch(*args, **kwargs))
        self.addCleanup(self._patches[-1].stop)
        return self._patches[-1].start()

    @staticmethod
    def make_tempfile(payload=None, prefix=None, named=True):
        """Create an 'ord-' prefixed temp file, optionally pre-filled.

        :param payload: content written to the file, rewound to offset 0
        :param prefix: extra prefix inserted as ``ord-<prefix>-``
        :param named: use NamedTemporaryFile (has a .name on disk)

        NOTE(review): the file is opened in the default binary mode, so on
        Python 3 ``payload`` must be bytes -- confirm against callers.
        """
        if named:
            cls = tempfile.NamedTemporaryFile
        else:
            cls = tempfile.TemporaryFile
        if prefix:
            prefix = 'ord-{}-'.format(prefix)
        else:
            prefix = 'ord-'
        fd = cls(prefix=prefix)
        if payload:
            fd.write(payload)
        fd.seek(0, os.SEEK_SET)
        return fd
class Dummy(object):
    """Ad-hoc attribute bag: ``Dummy(a=1).a == 1``."""

    def __init__(self, **attributes):
        # Install every keyword argument as an instance attribute.
        for key, value in attributes.items():
            setattr(self, key, value)

View File

View File

View File

@ -0,0 +1,288 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for ord.api.test_api
"""
from ord.tests import base
from ord.api.controllers.v1 import api
from ord.db import api as db_api
from oslo_config import cfg
from mox import stubout
import mock
import requests
import urllib2
import webob
CONF = cfg.CONF
class OrdApiTestCase(base.BaseTestCase):
    """Unit tests for the v1 API notifier and listener controllers."""

    PATH_PREFIX = ''

    def setUp(self):
        super(OrdApiTestCase, self).setUp()
        # mox-style stub manager; both UnsetAll and SmartUnsetAll are
        # registered so stubs cannot leak between tests.
        self.stubs = stubout.StubOutForTesting()
        self.addCleanup(self.stubs.UnsetAll)
        self.addCleanup(self.stubs.SmartUnsetAll)

    def test_api_notifier(self):
        # Happy path: a valid notification is persisted and dispatched via
        # RPC, and the API response reports 'Submitted'.
        ord_notifier = api.NotifierController()
        kwargs = {
            'request_id': '1',
            'resource_id': 'qwe1234',
            'resource-type': 'image'
        }
        payload = str(kwargs)
        params = {
            "ord-notifier": {
                "request-id": "2",
                "resource-id": "1",
                "resource-type": "image",
                "resource-template-version": "1",
                "resource-template-name": "image1",
                "resource-template-type": "hot",
                "operation": "create",
                "region": "local"}
        }
        # Canned record returned by the stubbed persistence layer.
        db_response = {'template_type': u'hot',
                       'status': 'Submitted',
                       'resource_name': u'image1',
                       'resource_operation': u'create',
                       'resource_template_version': u'1',
                       'request_id': '2', 'region': u'local',
                       'resource_id': u'1',
                       'resource_type': u'image',
                       'template_status_id': '1234'}
        CONF.set_default('region', 'local')

        def fake_persist_notification_record(*args, **kwds):
            return db_response

        def fake_invoke_notifier_rpc(*args, **kwds):
            return payload
        self.stubs.Set(ord_notifier, "_persist_notification_record",
                       fake_persist_notification_record)
        self.stubs.Set(ord_notifier._rpcapi, "invoke_notifier_rpc",
                       fake_invoke_notifier_rpc)
        response = ord_notifier.ord_notifier_POST(**params)
        expect_response = response['ord-notifier-response']['status']
        self.assertEqual(expect_response, 'Submitted')

    def test_api_listener(self):
        # The listener should process an incoming payload end-to-end with
        # urllib2 and the DB layer stubbed out, without raising.
        ctxt = {'request_id': '1'}
        api_listener = api.ListenerQueueHandler()
        kwargs = '{"request_id": "1",'\
            ' "resource_id": "qwe1234","resource-type": "image"}'
        payload = str(kwargs)
        db_template_target = {'template_type': 'hot',
                              'status': 'STATUS_RDS_SUCCESS',
                              'error_code': '',
                              'error_msg': ''}

        def mock_url_open(mock_response):
            mock_response = mock.Mock()
            mock_response.getcode.return_value = 200

        def urlrequest_mock_method(url, payload, headers):
            return "Failure"

        def fake_update_target(*args, **kwds):
            return db_template_target
        self.stubs.Set(urllib2, 'urlopen', mock_url_open)
        self.stubs.Set(db_api, "update_target_data",
                       fake_update_target)
        self.stubs.Set(urllib2, 'Request', urlrequest_mock_method)
        api_listener.invoke_listener_rpc(ctxt, payload)

    def test_rds_listener_failure(self):
        # Even if the downstream RDS call raises an HTTP error, the target
        # record keeps the status provided by the (stubbed) DB layer.
        ctxt = {'request_id': '1'}
        api_listener = api.ListenerQueueHandler()
        kwargs = '{"rds-listener": { "ord-notifier-id": "2",'\
            '"status": "error","resource-type": "image",'\
            '"error-code": "","error-msg": ""}}'
        db_template_target = {'template_type': 'hot',
                              'status': 'STATUS_RDS_SUCCESS',
                              'error_code': '',
                              'error_msg': ''}
        payload = str(kwargs)
        output_status = 'STATUS_RDS_SUCCESS'

        def mock_method(url, payload, headers):
            return "Failure"
        self.stubs.Set(urllib2, 'Request', mock_method)

        def mock_url_open(mock_response):
            mock_response = mock.Mock()
            http_error = requests.exceptions.HTTPError()
            mock_response.raise_for_status.side_effect = http_error

        def fake_update_target(*args, **kwds):
            return db_template_target
        self.stubs.Set(urllib2, 'urlopen', mock_url_open)
        self.stubs.Set(db_api, "update_target_data",
                       fake_update_target)
        api_listener.invoke_listener_rpc(ctxt, payload)
        self.assertEqual(output_status, db_template_target['status'])

    def test_rds_listener_success(self):
        # Same flow with a successful downstream call; the stubbed DB
        # status is expected to pass through unchanged.
        ctxt = {'request_id': '1'}
        api_listener = api.ListenerQueueHandler()
        kwargs = '{"rds-listener": { "ord-notifier-id": "2",'\
            '"status": "error","resource-type": "image",'\
            '"error-code": "","error-msg": ""}}'
        db_template_target = {'template_type': 'hot',
                              'status': 'Error_RDS_Dispatch',
                              'error_code': '',
                              'error_msg': ''}
        payload = str(kwargs)
        output_status = 'Error_RDS_Dispatch'

        def mock_method(url, payload, headers):
            return "Success"
        self.stubs.Set(urllib2, 'Request', mock_method)

        def mock_url_open(mock_response):
            mock_response = mock.Mock()
            mock_response.getcode.return_value = 200

        def fake_update_target(*args, **kwds):
            return db_template_target
        self.stubs.Set(urllib2, 'urlopen', mock_url_open)
        self.stubs.Set(db_api, "update_target_data",
                       fake_update_target)
        api_listener.invoke_listener_rpc(ctxt, payload)
        self.assertEqual(output_status, db_template_target['status'])

    def test_api_notifier_for_blank_region(self):
        # A request without a 'region' key must be rejected with HTTP 400.
        ord_notifier = api.NotifierController()
        params = {
            "ord-notifier": {
                "request-id": "2",
                "resource-id": "1",
                "resource-type": "image",
                "resource-template-version": "1",
                "resource-template-name": "image1",
                "resource-template-type": "hot",
                "operation": "create"}
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          ord_notifier.ord_notifier_POST,
                          **params)

    def test_api_notifier_for_invalid_region(self):
        # A region that does not match CONF.region must be rejected.
        ord_notifier = api.NotifierController()
        params = {
            "ord-notifier": {
                "request-id": "2",
                "resource-id": "1",
                "resource-type": "image",
                "resource-template-version": "1",
                "resource-template-name": "image1",
                "resource-template-type": "hot",
                "operation": "create",
                "region": "dev"}
        }
        CONF.set_default('region', 'local')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          ord_notifier.ord_notifier_POST,
                          **params)

    def test_api_notifier_for_invalid_payload(self):
        # Embedded whitespace in resource-type / template name makes the
        # payload invalid and must be rejected.
        ord_notifier = api.NotifierController()
        params = {
            "ord-notifier": {
                "request-id": "2",
                "resource-id": "1",
                "resource-type": "imag e",
                "resource-template-version": "1",
                "resource-template-name": "ima ge1",
                "resource-template-type": "hot",
                "operation": "create",
                "region": "local"}
        }
        CONF.set_default('region', 'local')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          ord_notifier.ord_notifier_POST,
                          **params)

    def test_api_ord_notifier_status(self):
        # Status lookup: the template + target records from the (stubbed)
        # DB layer must be flattened into the rds-listener payload shape.
        ord_notifier = api.NotifierController()
        request_id = {"Id": "2"}
        db_template = {'resource_operation': 'create',
                       'resource_id': '1',
                       'region': 'local',
                       'template_type': 'hot',
                       'request_id': '2'}
        db_template_target = {'template_type': 'hot',
                              'status': 'Submitted',
                              'resource_name': 'image1',
                              'resource_operation': 'create',
                              'resource_template_version': '1',
                              'request_id': '2',
                              'region': 'local',
                              'ord-notifier-id': '1',
                              'resource_id': '1',
                              'resource_type': 'image',
                              'template_status_id': '1',
                              'template_version': '1',
                              'error_code': 'ORD_000',
                              'error_msg': 'stack fail'}
        payload = {'rds-listener':
                   {'request-id': '2',
                    'resource-id': '1',
                    'resource-type': 'image',
                    'resource-template-version': '1',
                    'resource-template-type': 'hot',
                    'resource-operation': 'create',
                    'ord-notifier-id': '1',
                    'region': 'local',
                    'status': 'Submitted',
                    'error-code': 'ORD_000',
                    'error-msg': 'stack fail'}
                   }

        def fake_retrieve_template(*args, **kwds):
            return db_template

        def fake_retrieve_target(*args, **kwds):
            return db_template_target
        self.stubs.Set(db_api, "retrieve_template",
                       fake_retrieve_template)
        self.stubs.Set(db_api, "retrieve_target",
                       fake_retrieve_target)
        notification_status = ord_notifier.ord_notifier_status(**request_id)
        self.assertEqual(payload, notification_status)

View File

View File

@ -0,0 +1,54 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from mock import patch
from ord.tests import base
from ord.client import getrepo
from ord.common.exceptions import ORDException
from oslo_config import cfg
CONF = cfg.CONF
class GetRepoTestCase(base.BaseTestCase):
    """Unit tests for the git template-repo client."""

    def setUp(self):
        super(GetRepoTestCase, self).setUp()
        self.git_inst = None
        self.local_repo = 'ord_test'
        # Patch git_init_repo so constructing the client performs no
        # actual git operations.
        with patch.object(getrepo.TemplateRepoClient, 'git_init_repo'):
            self.git_inst = getrepo.TemplateRepoClient(self.local_repo)

    def test_pullrepo_template(self):
        # With run_git stubbed, pull_template should return the absolute
        # path to the requested file.
        path = os.path.abspath('')
        testfile = 'ord/dummy.py'
        expected = path + "/" + testfile
        with patch.object(self.git_inst, 'run_git'):
            result = self.git_inst.pull_template(path, testfile)
        self.assertEqual(expected, result)

    def test_fail_pull_template(self):
        # An unstubbed pull of a nonexistent file must raise ORDException.
        path = os.path.abspath('')
        testfile = 'tests/files/stack0.yaml'
        self.assertRaises(ORDException, self.git_inst.pull_template,
                          path, testfile)

    def test_git_init_repo(self):
        # git_init_repo must delegate to run_git.
        self.subprocess = mock.Mock()
        with patch.object(self.git_inst, 'run_git') as mock_method:
            self.git_inst.git_init_repo(self.local_repo)
            mock_method.assert_called()

View File

@ -0,0 +1,130 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient import exc as heat_exc
import mock
from ord.client import heat as ord_heat
from ord.tests import base
from ord.common import exceptions as exc
class TestHeatClient(base.BaseTestCase):
    """Unit tests for the ord HEAT client wrapper."""

    # Minimal valid HOT template used by the create/update tests.
    test_template = """heat_template_version: 2015-04-30
description: Test stack"""
    dummy_stacks_list = [
        base.Dummy(stack_name='a'),
        base.Dummy(stack_name='bb'),
        base.Dummy(stack_name='ccc')]
    dummy_resource_list = [base.Dummy(resource_type='Image',
                                      physical_resource_id='1234')]

    def setUp(self):
        super(TestHeatClient, self).setUp()
        self.clients = mock.Mock()
        # Chained assignment: heat()/glance() return a (client, keystone)
        # tuple while the individual mocks are also kept on self.
        self.clients.heat.return_value = self.heat_client,\
            self.keystone_client = mock.Mock(), mock.Mock()
        self.clients.glance.return_value = self.glance_client,\
            self.keystone_client = mock.Mock(), mock.Mock()
        self.patch('ord.client.heat.Clients').return_value = self.clients
        self.heat = ord_heat.HeatClient()

    def test_get_stacks(self):
        self.heat_client.stacks.list.return_value = self.dummy_stacks_list
        result = self.heat.get_stacks()
        self.assertEqual(self.dummy_stacks_list, result)

    def test_get_stack(self):
        stack_idnr = "1"
        stack = self.dummy_stacks_list[0]
        self.heat_client.stacks.get.return_value = stack
        result = self.heat.get_stack(stack_idnr)
        self.assertEqual(stack, result)
        self.heat_client.stacks.get.assert_called_with(stack_idnr)

    def test_get_stack_by_name(self):
        name = self.dummy_stacks_list[-1].stack_name
        self.heat_client.stacks.list.return_value = self.dummy_stacks_list
        result = self.heat.get_stack_by_name(name)
        self.assertEqual(self.dummy_stacks_list[-1], result)

    def test_get_stack_by_name_fail(self):
        # A name that matches no stack must raise HEATLookupError.
        name = 'force-name-mismatch-{}'.format(
            self.dummy_stacks_list[-1].stack_name)
        self.heat_client.stacks.list.return_value = self.dummy_stacks_list
        self.assertRaises(
            exc.HEATLookupError, self.heat.get_stack_by_name, name)

    def test_create_stack(self):
        # The wrapper must read the template file and forward its content.
        stack_name = "test_stack"
        template = self.make_tempfile(
            prefix='heat-create', payload=self.test_template)
        self.heat.create_stack(stack_name, template.name)
        self.heat_client.stacks.create.assert_called_once_with(
            stack_name=stack_name, template=self.test_template)

    def test_update_stack(self):
        stack_idnr = "1"
        template = self.make_tempfile(
            prefix='heat-update', payload=self.test_template)
        self.heat.update_stack(stack_idnr, template.name)
        self.heat_client.stacks.update.assert_called_once_with(
            stack_idnr, template=self.test_template)

    def test_delete_stack(self):
        stack_idnr = "1"
        self.heat.delete_stack(stack_idnr)
        self.heat_client.stacks.delete.assert_called_with(stack_idnr)

    def test_error_masquerading(self):
        # Every heatclient failure must be re-raised as
        # exc.HEATIntegrationError.
        error = heat_exc.CommunicationError('ord-heat-stack-create-test')
        stack_idnr = '0'
        stack_name = "test_stack"
        template = self.make_tempfile(
            prefix='head-create', payload=self.test_template)
        h = self.heat_client
        for mock_call, method, args in (
                (h.stacks.list, self.heat.get_stacks, ()),
                (h.stacks.create, self.heat.create_stack,
                 (stack_name, template.name)),
                (h.stacks.update, self.heat.update_stack,
                 (stack_idnr, template.name)),
                (h.stacks.delete, self.heat.delete_stack, (stack_idnr,))):
            mock_call.side_effect = error
            if not args:
                args = tuple()
            self.assertRaises(
                exc.HEATIntegrationError, method, *args)

    def test_get_image_data_by_stackid(self):
        # Bug fix: this test was named ``get_image_data_by_stackid`` --
        # without the ``test_`` prefix the runner never executed it.
        stack_id = '1234'
        self.heat_client.resources.list.return_value = self.dummy_resource_list
        image_data = 'new_image'
        self.glance_client.images.get.return_value = image_data
        result = self.heat.get_image_data_by_stackid(stack_id)
        self.assertEqual(image_data, result)

View File

@ -0,0 +1,89 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for ord.client.rpcapi
"""
import copy
from ord.tests import base
from ord.client import rpcapi
from oslo_config import cfg
import stubout
CONF = cfg.CONF
class RpcAPITestCase(base.BaseTestCase):
    """Unit tests for ord.client.rpcapi message dispatch."""

    def setUp(self):
        super(RpcAPITestCase, self).setUp()
        # mox-style stub manager, fully unwound after each test.
        self.stubs = stubout.StubOutForTesting()
        self.addCleanup(self.stubs.UnsetAll)
        self.addCleanup(self.stubs.SmartUnsetAll)

    def _test_api(self, method, rpc_method, **kwargs):
        # Drive ``method`` on a real RpcAPI with the oslo.messaging client
        # stubbed; verifies the prepare() target kwargs and the arguments
        # handed to the underlying cast/call.
        ctxt = {'request_id': '1'}
        rpcapi_inst = rpcapi.RpcAPI()
        self.assertIsNotNone(rpcapi_inst.target)
        self.assertIsNotNone(rpcapi_inst.transport)
        self.assertIsNotNone(rpcapi_inst._client)
        self.assertEqual(rpcapi_inst.target.topic, 'ord-notifier-q')
        # Only 'call'-style invocations produce a return value.
        expected_retval = 'foo' if method == 'call' else None
        target = {
            "version": kwargs.pop('version', '1.0')
        }
        expected_msg = copy.deepcopy(kwargs)
        self.fake_args = None
        self.fake_kwargs = None

        def _fake_prepare_method(*args, **kwds):
            # Every kwarg passed to prepare() must match the target spec.
            for kwd in kwds:
                self.assertEqual(kwds[kwd], target[kwd])
            return rpcapi_inst._client

        def _fake_rpc_method(*args, **kwargs):
            # Capture what reached the transport for later comparison.
            self.fake_args = args
            self.fake_kwargs = kwargs
            if expected_retval:
                return expected_retval
        self.stubs.Set(rpcapi_inst._client, "prepare", _fake_prepare_method)
        self.stubs.Set(rpcapi_inst._client, rpc_method, _fake_rpc_method)
        retval = getattr(rpcapi_inst, method)(ctxt, **kwargs)
        self.assertEqual(retval, expected_retval)
        expected_args = [ctxt, method, expected_msg]
        for arg, expected_arg in zip(self.fake_args, expected_args):
            self.assertEqual(arg, expected_arg)

    def test_invoke_notifier_rpc(self):
        # invoke_notifier_rpc must be a fire-and-forget cast at v1.0.
        kwargs = {
            'request_id': '1',
            'resource_id': 'qwe1234',
            'resource-type': 'image'
        }
        payload = str(kwargs)
        self._test_api('invoke_notifier_rpc',
                       rpc_method='cast',
                       payload=payload,
                       version='1.0')

View File

@ -0,0 +1,89 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for ord.client.rpcengine
"""
import copy
from ord.tests import base
from ord.client import rpcengine
from oslo_config import cfg
import stubout
CONF = cfg.CONF
class RpcEngineTestCase(base.BaseTestCase):
    """Unit tests for ord.client.rpcengine message dispatch."""

    def setUp(self):
        super(RpcEngineTestCase, self).setUp()
        # mox-style stub manager, fully unwound after each test.
        self.stubs = stubout.StubOutForTesting()
        self.addCleanup(self.stubs.UnsetAll)
        self.addCleanup(self.stubs.SmartUnsetAll)

    def _test_api(self, method, rpc_method, **kwargs):
        # Drive ``method`` on a real RpcEngine with the oslo.messaging
        # client stubbed; verifies the prepare() target kwargs and the
        # arguments handed to the underlying cast/call.
        ctxt = {'request_id': '1'}
        rpcengine_inst = rpcengine.RpcEngine()
        self.assertIsNotNone(rpcengine_inst.target)
        self.assertIsNotNone(rpcengine_inst.transport)
        self.assertIsNotNone(rpcengine_inst._client)
        self.assertEqual(rpcengine_inst.target.topic, 'ord-listener-q')
        # Only 'call'-style invocations produce a return value.
        expected_retval = 'foo' if method == 'call' else None
        target = {
            "version": kwargs.pop('version', '1.0')
        }
        expected_msg = copy.deepcopy(kwargs)
        self.fake_args = None
        self.fake_kwargs = None

        def _fake_prepare_method(*args, **kwds):
            # Every kwarg passed to prepare() must match the target spec.
            for kwd in kwds:
                self.assertEqual(kwds[kwd], target[kwd])
            return rpcengine_inst._client

        def _fake_rpc_method(*args, **kwargs):
            # Capture what reached the transport for later comparison.
            self.fake_args = args
            self.fake_kwargs = kwargs
            if expected_retval:
                return expected_retval
        self.stubs.Set(rpcengine_inst._client, "prepare", _fake_prepare_method)
        self.stubs.Set(rpcengine_inst._client, rpc_method, _fake_rpc_method)
        retval = getattr(rpcengine_inst, method)(ctxt, **kwargs)
        self.assertEqual(retval, expected_retval)
        expected_args = [ctxt, method, expected_msg]
        for arg, expected_arg in zip(self.fake_args, expected_args):
            self.assertEqual(arg, expected_arg)

    def test_invoke_listener_rpc(self):
        # invoke_listener_rpc must be a fire-and-forget cast at v1.0.
        kwargs = {
            'request_id': '1',
            'resource_id': 'qwe1234',
            'resource-type': 'image'
        }
        payload = str(kwargs)
        self._test_api('invoke_listener_rpc',
                       rpc_method='cast',
                       payload=payload,
                       version='1.0')

View File

View File

@ -0,0 +1,99 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ord.common import exceptions as exc
from ord.tests import base
class TestExceptions(base.BaseTestCase):
    """Behavioral tests for ORDException message-template machinery."""

    def test_no_argumen_error(self):
        err = _SubjectError0()
        self.assertEqual('Error: subject0', err.message)

    def test_message_override(self):
        # A positional argument replaces the template entirely.
        err = _SubjectError0('Subject0: fully custom message')
        self.assertEqual('Subject0: fully custom message', err.message)

    def test_argument_substitution(self):
        err = _SubjectError1(sub='runtime substitution')
        self.assertEqual('Error: subject1 - runtime substitution', err.message)

    def test_missing_mandatory_argument(self):
        # {sub} has no default, so omitting it is a TypeError.
        self.assertRaises(TypeError, _SubjectError1)

    def test_custom_message_and_substitution(self):
        # Mixing a custom message with substitutions is not allowed.
        self.assertRaises(TypeError, _SubjectError1,
                          'Custom error message', sub='test')

    def test_default_substitution(self):
        err = _SubjectError2()
        self.assertEqual('Error: subject2 - default description', err.message)

    def test_default_substitution_inheritance(self):
        # Defaults from the base class merge with the subclass's own.
        err = _SubjectError3()
        self.assertEqual('Error: subject3 - default description, one more '
                         'default substitution', err.message)

    def test_substitution_overrride(self):
        err = _SubjectError3(sub='aaa', sub2='bbb')
        self.assertEqual('Error: subject3 - aaa, bbb', err.message)

    def test_arguments(self):
        dummy = base.Dummy(test='test')
        e = _SubjectError2(dummy=dummy)
        self.assertEqual({
            'sub': 'default description',
            'dummy': dummy}, e.arguments)

    def test_arguments_immutability(self):
        # Mutating the returned mapping must not affect the exception.
        e = _SubjectError2()
        e.arguments['sub'] = 'test'
        e.arguments['new'] = 'new'
        self.assertEqual({
            'sub': 'default description'}, e.arguments)

    def test_clone(self):
        dummy = base.Dummy(clone='clone')
        e = _SubjectError2()
        e_cloned = e.clone(sub='clone', dummy=dummy)
        self.assertIs(_SubjectError2, type(e_cloned))
        # NOTE(review): this asserts on the *source* exception's arguments
        # after cloning; presumably ``e_cloned.arguments`` was intended --
        # verify against ORDException.clone() semantics.
        self.assertEqual({
            'sub': 'clone',
            'dummy': dummy}, e.arguments)
        self.assertEqual('Error: subject2 - clone', e_cloned.message)
# Test-only exception fixtures exercising message templates, mandatory
# substitutions, defaults, and default inheritance.
class _SubjectError0(exc.ORDException):
    # No substitution placeholders at all.
    message_template = 'Error: subject0'


class _SubjectError1(exc.ORDException):
    # {sub} has no default, so it is mandatory at construction time.
    message_template = 'Error: subject1 - {sub}'


class _SubjectError2(exc.ORDException):
    # {sub} falls back to a default when not supplied.
    message_template = 'Error: subject2 - {sub}'
    default_substitution_values = {
        'sub': 'default description'}


class _SubjectError3(_SubjectError2):
    # Inherits the 'sub' default from _SubjectError2 and adds 'sub2'.
    message_template = 'Error: subject3 - {sub}, {sub2}'
    default_substitution_values = {
        'sub2': 'one more default substitution'}

View File

@ -0,0 +1,51 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import os
from ord.common import exceptions as exc
from ord.common import utils
from ord.tests import base
class TestUtils(base.BaseTestCase):
    """Unit tests for ord.common.utils helpers."""

    def test_load_file(self):
        payload = 'dummy' * 5
        temp = self.make_tempfile(payload=payload)
        result = utils.load_file(temp.name)
        self.assertEqual(payload, result)

    def test_load_file_fail(self):
        # An IOError from open() must surface as exc.InternalError.
        # NOTE(review): patches '__builtin__.open', which exists only on
        # Python 2.
        temp = self.make_tempfile(payload='payload')
        error = IOError(errno.ENOENT, os.strerror(errno.ENOENT), temp.name)
        self.patch('__builtin__.open', side_effect=error)
        self.assertRaises(exc.InternalError, utils.load_file, temp.name)

    def test_printable_time_interval(self):
        # Each tuple: (seconds, rendering with ms, rendering without ms).
        for delay, expect, expect_no_ms in (
                (0, '0ms', '0ms'),
                (1, '1s 0ms', '1s'),
                (1.50001, '1s 500ms', '1s'),
                (65, '1m 5s 0ms', '1m 5s'),
                (3605, '1h 0m 5s 0ms', '1h 0m 5s'),
                (3601 * 25, '1d 1h 0m 25s 0ms', '1d 1h 0m 25s'),
                (3600 * 24 * 367, '367d 0h 0m 0s 0ms', '367d 0h 0m 0s')):
            self.assertEqual(
                expect, utils.printable_time_interval(delay, show_ms=True))
            self.assertEqual(
                expect_no_ms, utils.printable_time_interval(delay))

View File

View File

View File

@ -0,0 +1,82 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from ord.db.sqlalchemy import api as db_api
from ord.tests import base
class TestORDNotify(base.BaseTestCase):
    """Unit tests for the sqlalchemy db api template CRUD helpers."""

    def _prepare_fake_message(self):
        # A template record populated with placeholder values.
        fake_input = {"request_id": "fake_req",
                      "resource_id": "fake_res",
                      "template_type": "hot",
                      "resource_operation": "create",
                      "region": "fake_region",
                      "time_stamp": datetime.datetime.now(),
                      "template_status_id": "fake_id",
                      "resource_template_version": "fake_ver",
                      "resource_name": "fake_name",
                      "resource_type": "fake_type",
                      "status": "submitted",
                      "error_code": "",
                      "error_msg": ""
                      }
        return fake_input

    def setUp(self):
        super(TestORDNotify, self).setUp()
        self.mock_db_api = mock.Mock()
        # Keep every test away from a real database session/engine.
        self.patch(
            'ord.db.sqlalchemy.api.get_session').return_value = mock.Mock()
        self.patch(
            'ord.db.sqlalchemy.api.get_engine').return_value = mock.Mock()

    @mock.patch.object(db_api, 'get_session')
    def test_create_ord_data(self, mock_session):
        input_msg = self._prepare_fake_message()
        db_api.create_template(input_msg)
        mock_session.assert_called_once_with()

    @mock.patch.object(db_api, 'get_session')
    @mock.patch.object(db_api, 'create_target')
    def test_create_ord_target_data(self, mock_target, mock_session):
        # Typo fix: this parameter was previously spelled ``mock_taget``.
        # Decorators inject the mocks positionally, so the rename is safe.
        input_msg = self._prepare_fake_message()
        db_api.create_template(input_msg)
        mock_session.assert_called_once_with()
        assert mock_target.called

    @mock.patch.object(db_api, 'get_session')
    @mock.patch.object(db_api, 'model_query')
    def test_retrieve_ord_data(self, mock_query, mock_session):
        db_api.retrieve_template("fake_res")
        mock_session.assert_called_once_with()
        assert mock_query.called

    @mock.patch.object(db_api, 'get_session')
    @mock.patch.object(db_api, 'model_query')
    def test_retrieve_target_data(self, mock_query, mock_session):
        db_api.retrieve_target("fake_res")
        mock_session.assert_called_once_with()
        assert mock_query.called

    @mock.patch.object(db_api, 'get_session')
    @mock.patch.object(db_api, 'model_query')
    def test_retrieve_target_data_by_status(self, mock_query, mock_session):
        db_api.retrieve_target_by_status("fake_id")
        mock_session.assert_called_once_with()
        assert mock_query.called

View File

View File

@ -0,0 +1,69 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from ord.tests import base
from ord.engine.workerfactory import WorkerFactory
from ord.common.exceptions import WorkerThreadError
class TestWorkerFactory(base.BaseTestCase):
    """Unit tests for WorkerFactory worker creation/removal/dispatch."""

    def setUp(self):
        # Canned arguments handed to getWorker() in the tests below.
        self.operation = 'create'
        self.path_to_tempate = 'test_path'
        self.stack_name = 'test_stack'
        self.template_status_id = '1'
        self.resource_type = 'image'
        self.template_type = 'hot'
        self.threadId = 123
        super(TestWorkerFactory, self).setUp()
        # A single shared mock stands in for every external client.
        self.clients = mock.Mock()
        for target in ('ord.client.getrepo.TemplateRepoClient',
                       'ord.client.heat.HeatClient',
                       'ord.client.rpcengine.RpcEngine'):
            self.patch(target).return_value = self.clients
        self.worker = WorkerFactory()

    def test_getWorker(self):
        worker_id = self.worker.getWorker(
            self.operation, self.path_to_tempate, self.stack_name,
            self.template_status_id, self.resource_type, self.template_type)
        self.assertTrue(worker_id > 0)

    def test_negetive_removeWorker(self):
        # Removing an id that was never handed out must raise.
        self.assertRaises(
            WorkerThreadError, self.worker.removeWorker, self.threadId)

    def test_removeWorker(self):
        worker_id = self.worker.getWorker(
            self.operation, self.path_to_tempate, self.stack_name,
            self.template_status_id, self.resource_type, self.template_type)
        try:
            self.worker.removeWorker(worker_id)
        except Exception:
            self.fail()

    def test_negetive_execute(self):
        # execute() on an unknown worker id must raise as well.
        self.assertRaises(
            WorkerThreadError, self.worker.execute,
            self.threadId, self.operation)

View File

@ -0,0 +1,402 @@
# Copyright 2016 ATT
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import os
import mock
from oslo_config import cfg
from ord.common import exceptions as exc
from ord.common import utils
from ord.engine import workerfactory
from ord.tests import base
CONF = cfg.CONF
# FIXME(db2242): pep8 compatible - camelcase attributes
class TestWorkerThread(base.BaseTestCase):
    """Unit tests for workerfactory.WorkerThread.

    setUp replaces every collaborator (template repo client, heat client,
    db_api and the WorkerFactory) with mocks so each test drives a single
    WorkerThread method in isolation.
    """

    def setUp(self):
        super(TestWorkerThread, self).setUp()
        # Fixed inputs used to construct the thread under test.
        self.operation = utils.OPERATION_CREATE
        self.path_to_tempate = 'test_path'
        self.stack_name = 'test_stack'
        self.template_status_id = '1'
        self.resource_type = 'image'
        self.template_type = 'hot'
        self.threadId = 123
        self.local_repo = 'aic-orm-resources-labs'
        # Template-repo client mock; pull result kept for later asserts.
        self._temp_repo_client = mock.Mock()
        self._temp_repo_client.pull_template.return_value = self.pull_client\
            = mock.Mock()
        self.patch('ord.engine.workerfactory.getrepo').return_value\
            = self._temp_repo_client
        # db_api mock (update_target_data result kept for asserts).
        self.db_api = mock.Mock()
        self.db_api.update_target_data.return_value = self.db_client\
            = mock.Mock()
        self.patch('ord.db.sqlalchemy.api').return_value\
            = self.db_api
        # WorkerFactory mock so thread bookkeeping has no side effects.
        self.WorkerFactory = mock.Mock()
        self.WorkerFactory.removeWorker.return_value = self.remove_clinet\
            = mock.Mock()
        self.patch('ord.engine.workerfactory.WorkerFactory').return_value\
            = self.WorkerFactory
        self.workerThread = workerfactory.WorkerThread(
            self.threadId, self.operation, self.path_to_tempate,
            self.stack_name, self.template_status_id,
            self.resource_type)
        # Wire the mocks directly onto the instance under test.
        self.workerThread._heat_client = self.heat_client = mock.Mock()
        self.workerThread._temp_repo_client = self._temp_repo_client
        self.workerThread.db_api = self.db_api

    def test_extract_resource_extra_metadata(self):
        # On success for an image resource, image facts from heat are
        # merged into the rds-listener payload in place.
        stack = base.Dummy(id='1', stack_name=self.stack_name)
        image_data = {'checksum': 'dae557b1365b606e57fbd5d8c9d4516a',
                      'size': '10',
                      'virtual_size': '12'}
        input_payload = {'rds-listener':
                         {'request-id': '2',
                          'resource-id': '1',
                          'resource-type': 'image'}
                         }
        output_payload = {'rds-listener':
                          {'request-id': '2',
                           'resource-id': '1',
                           'resource-type': 'image',
                           'resource_extra_metadata':
                           {'checksum': 'dae557b1365b606e57fbd5d8c9d4516a',
                            'size': '10',
                            'virtual_size': '12'}}}
        self.heat_client.get_stack_by_name.return_value = stack
        self.heat_client.get_image_data_by_stackid.return_value = image_data
        self.workerThread.extract_resource_extra_metadata(
            input_payload, utils.STATUS_SUCCESS)
        self.heat_client.get_stack_by_name.assert_called_once_with(
            stack.stack_name)
        self.heat_client.\
            get_image_data_by_stackid.assert_called_once_with(stack.id)
        # input_payload is mutated in place to match output_payload.
        self.assertEqual(output_payload, input_payload)

    def test_fetch_template(self):
        self.workerThread._fetch_template()
        # Template is pulled from the configured local repo by path.
        self._temp_repo_client.pull_template\
            .assert_called_with(self.local_repo, self.path_to_tempate)

    def test_create_stack(self):
        self.heat_client.create_stack.return_value = {'stack': {'id': 1}}
        # Template path resolves under the user's home + local repo.
        template = os.path.join(
            os.path.expanduser('~'), self.local_repo, self.path_to_tempate)
        self.workerThread._create_stack(template)
        self.heat_client.create_stack.assert_called_once_with(
            self.stack_name, template)

    def test_update_stack(self):
        stack = base.Dummy(id='1', stack_name=self.stack_name)
        template = os.path.join(
            os.path.expanduser('~'), self.local_repo, self.path_to_tempate)
        self.heat_client.get_stack_by_name.return_value = stack
        self.workerThread._update_stack(template)
        # Update is resolved by name first, then applied by stack id.
        self.heat_client.get_stack_by_name.assert_called_once_with(
            self.stack_name)
        self.heat_client.update_stack.\
            assert_called_with(stack.id, template)

    def test_delete_stack(self):
        stack = base.Dummy(id='1', stack_name=self.stack_name)
        self.heat_client.get_stack_by_name.return_value = stack
        self.workerThread._delete_stack()
        self.heat_client.get_stack_by_name.assert_called_once_with(
            stack.stack_name)
        self.heat_client.delete_stack.assert_called_once_with(stack.id)

    def test_wait_for_heat(self):
        # Deterministic clock: time() counts 1, 2, 3, ... per call.
        time_time = self.patch('time.time', side_effect=itertools.count(1))
        time_sleep = self.patch('time.sleep')
        stack_wait = base.Dummy(
            id='1', stack_name=self.stack_name,
            stack_status='CREATE_IN_PROGRESS')
        stack_ready = base.Dummy(
            id='1', stack_name=self.stack_name, stack_status='CREATE_COMPLETE')
        # Four in-progress polls before the stack completes.
        status_responses = [stack_wait] * 4 + [stack_ready]
        self.heat_client.get_stack.side_effect = status_responses
        # raise exception in case of failure
        self.workerThread._wait_for_heat(stack_wait, utils.OPERATION_CREATE)
        # One sleep per poll, each using the configured poll interval.
        self.assertEqual(
            [mock.call(CONF.heat_poll_interval)] * 5,
            time_sleep.mock_calls)
        self.assertEqual(6, time_time.call_count)

    def test_wait_for_heat_fail(self):
        self.patch('time.time', side_effect=itertools.count(1))
        self.patch('time.sleep')
        stack_wait = base.Dummy(
            id='1', stack_name=self.stack_name,
            stack_status='CREATE_IN_PROGRESS')
        stack_ready = base.Dummy(
            id='1', stack_name=self.stack_name, stack_status='CREATE_FAILED')
        status_responses = [stack_wait] * 4 + [stack_ready]
        self.heat_client.get_stack.side_effect = status_responses
        # A terminal *_FAILED status must surface as StackOperationError
        # carrying the operation and the offending stack snapshot.
        error = self.assertRaises(
            exc.StackOperationError, self.workerThread._wait_for_heat,
            stack_wait, utils.OPERATION_CREATE)
        self.assertEqual(utils.OPERATION_CREATE, error.arguments['operation'])
        self.assertIs(status_responses[-1], error.arguments['stack'])

    def test_wait_for_heat_race(self):
        # Simulates an update racing with polling: the first COMPLETE seen
        # still carries the *old* updated_time and must not end the wait.
        self.patch('time.time', side_effect=itertools.count(1))
        self.patch('time.sleep')
        stack_initial = base.Dummy(
            id='1', stack_name=self.stack_name, stack_status='UPDATE_COMPLETE',
            updated_time='2016-06-02T16:30:48Z')
        stack_wait = base.Dummy(
            id='1', stack_name=self.stack_name,
            stack_status='UPDATE_IN_PROGRESS',
            updated_time='2016-06-02T16:30:48Z')
        stack_ready = base.Dummy(
            id='1', stack_name=self.stack_name, stack_status='UPDATE_COMPLETE',
            updated_time='2016-06-02T16:30:50Z')
        status_responses = [stack_initial]
        status_responses += [stack_wait] * 2
        status_responses += [stack_ready]
        # Capture the real StatusTransitions instance to inspect afterwards.
        status_transition = workerfactory.StatusTransitions('_unittest_')
        self.patch(
            'ord.engine.workerfactory.StatusTransitions',
            return_value=status_transition)
        self.heat_client.get_stack.side_effect = status_responses
        self.workerThread._wait_for_heat(stack_initial, utils.OPERATION_MODIFY)
        self.assertEqual('UPDATE_COMPLETE', status_transition.transitions[-1])

    def test_run(self):
        # Happy path: execute, persist results, notify.
        self.workerThread._execute_operation = execute = mock.Mock()
        execute.return_value = 'OPERATION_STATUS'
        self.workerThread._update_permanent_storage = \
            save_results = mock.Mock()
        self.workerThread._send_operation_results = send_results = mock.Mock()
        self.workerThread.run()
        execute.assert_called_with()
        save_results.assert_called_once_with()
        send_results.assert_called_once_with()

    def test_run_fail(self):
        # Controlled (ORD) failure: the error is persisted and results
        # are still sent.
        error = exc.StackOperationError(operation='unittest', stack='dummy')
        self.workerThread._execute_operation = execute = mock.Mock(
            side_effect=error)
        self.workerThread._update_permanent_storage = save_status = mock.Mock()
        self.workerThread._send_operation_results = send_results = mock.Mock()
        self.workerThread.run()
        execute.assert_called_once_with()
        save_status.assert_called_once_with(error)
        send_results.assert_called_once_with()

    def test_run_fail_uncontrolled(self):
        # Unexpected exception: run() must not propagate it.
        error = ZeroDivisionError()
        self.workerThread._execute_operation = execute = mock.Mock(
            side_effect=error)
        self.workerThread._update_permanent_storage = save_status = mock.Mock()
        self.workerThread._send_operation_results = send_results = mock.Mock()
        self.workerThread.run()
        execute.assert_called_once_with()

    def test_update_permanent_storage(self):
        db_api = self.patch('ord.engine.workerfactory.db_api')
        self.workerThread._update_permanent_storage()
        # No error argument means success status with the success code.
        db_api.update_target_data.assert_called_once_with(
            self.template_status_id, utils.STATUS_SUCCESS,
            error_code=exc.SUCCESS_CODE)

    def test_update_permanent_storage_error(self):
        db_api = self.patch('ord.engine.workerfactory.db_api')
        # One representative error per mapping rule checked below.
        generic_error = ZeroDivisionError()
        ord_error = exc.IntegrationError('unit-test')
        stack_error = exc.StackOperationError(
            stack='ord-stack-error-without-rollback', operation='unit-test')
        stack_error_rollback = exc.StackOperationError(
            stack='ord-stack-error-with-rollback',
            operation=utils.OPERATION_CREATE, rollback_status=True)
        stack_error_rollback_fail0 = exc.StackOperationError(
            stack='ord-stack-error-with-rollback-fail',
            operation=utils.OPERATION_CREATE, rollback_status=False)
        stack_error_rollback_fail1 = exc.StackOperationError(
            stack='ord-stack-error-with-rollback-fail-and-message',
            operation=utils.OPERATION_CREATE, rollback_status=False,
            rollback_message='a\nbb\nccc')
        # (error, expected status, expected code, expected message) —
        # rollback outcome/message is appended line by line with a
        # "[ROLLBACK]" prefix.
        for error, status, error_code, error_message in (
                (generic_error, utils.STATUS_INTERNAL_ERROR,
                 exc.ERROR_UNKNOWN_EXCEPTION, str(generic_error)),
                (ord_error, utils.STATUS_INTERNAL_ERROR,
                 ord_error.error_code, ord_error.message),
                (stack_error, utils.STATUS_ERROR,
                 stack_error.error_code, stack_error.message),
                (stack_error_rollback, utils.STATUS_ERROR,
                 stack_error_rollback.error_code,
                 '{}\n[ROLLBACK] success'.format(
                     stack_error_rollback.message)),
                (stack_error_rollback_fail0, utils.STATUS_ERROR,
                 stack_error_rollback_fail0.error_code,
                 '{}\n[ROLLBACK] fail'.format(
                     stack_error_rollback_fail0.message)),
                (stack_error_rollback_fail1, utils.STATUS_ERROR,
                 stack_error_rollback_fail1.error_code,
                 '{}\n[ROLLBACK] a\n[ROLLBACK] bb\n[ROLLBACK] ccc'.format(
                     stack_error_rollback_fail1.message))):
            self.workerThread._update_permanent_storage(error)
            db_api.update_target_data.assert_called_once_with(
                self.template_status_id, status,
                error_code=error_code, error_msg=error_message)
            db_api.update_target_data.reset_mock()
class TestStatusTransitions(base.BaseTestCase):
    """Checks the compact string rendering of StatusTransitions."""

    def test(self):
        # (sequence of statuses, expected rendering): repeats collapse
        # into "X(n)" and distinct statuses are joined with " ~> ".
        cases = (
            ('A', 'A'),
            ('AA', 'A(2)'),
            ('ABC', 'A ~> B ~> C'),
            ('AABBCC', 'A(2) ~> B(2) ~> C(2)'),
        )
        for statuses, rendered in cases:
            tracker = workerfactory.StatusTransitions(statuses[0])
            for status in statuses[1:]:
                tracker.add(status)
            self.assertEqual(rendered, str(tracker))
class TestHEATIntermediateStatusChecker(base.BaseTestCase):
    """Scenario tests for HEATIntermediateStatusChecker polling logic."""

    def test_scenario(self):
        cls = workerfactory.HEATIntermediateStatusChecker

        def snapshot(action, status, updated=None):
            # Fake HEAT stack record in the shape the checker reads.
            return base.Dummy(
                updated_time=updated,
                stack_status='_'.join((action, status)))

        # Each scenario: (ordered poll snapshots, operation, expect failure).
        # The checker must report "still in progress" for every snapshot
        # except the last one.
        scenarios = (
            ([snapshot(cls.ACTION_CREATE, cls.STATUS_IN_PROGRESS),
              snapshot(cls.ACTION_CREATE, cls.STATUS_COMPLETE)],
             utils.OPERATION_CREATE, False),
            ([snapshot(cls.ACTION_CREATE, cls.STATUS_IN_PROGRESS),
              snapshot(cls.ACTION_CREATE, cls.STATUS_FAIL)],
             utils.OPERATION_CREATE, True),
            ([snapshot(cls.ACTION_CREATE, cls.STATUS_COMPLETE),
              snapshot(cls.ACTION_DELETE, cls.STATUS_IN_PROGRESS),
              snapshot(cls.ACTION_DELETE, cls.STATUS_COMPLETE)],
             utils.OPERATION_DELETE, False),
            ([snapshot(cls.ACTION_CREATE, cls.STATUS_COMPLETE),
              snapshot(cls.ACTION_DELETE, cls.STATUS_IN_PROGRESS),
              snapshot(cls.ACTION_DELETE, cls.STATUS_FAIL)],
             utils.OPERATION_DELETE, True),
            ([snapshot(cls.ACTION_CREATE, cls.STATUS_COMPLETE),
              snapshot(cls.ACTION_UPDATE, cls.STATUS_IN_PROGRESS),
              snapshot(cls.ACTION_UPDATE, cls.STATUS_COMPLETE,
                       updated='2016-06-02T16:30:00Z')],
             utils.OPERATION_MODIFY, False),
            # Re-update of an already-updated stack: completion is only
            # recognised once updated_time moves forward.
            ([snapshot(cls.ACTION_UPDATE, cls.STATUS_COMPLETE,
                       updated='2016-06-02T16:30:00Z'),
              snapshot(cls.ACTION_UPDATE, cls.STATUS_COMPLETE,
                       updated='2016-06-02T16:30:00Z'),
              snapshot(cls.ACTION_UPDATE, cls.STATUS_IN_PROGRESS,
                       updated='2016-06-02T16:30:00Z'),
              snapshot(cls.ACTION_UPDATE, cls.STATUS_COMPLETE,
                       updated='2016-06-02T16:30:01Z')],
             utils.OPERATION_MODIFY, False),
        )
        for steps, operation, expect_fail in scenarios:
            checker = cls(steps[0], operation)
            for step in steps[:-1]:
                self.assertEqual(True, checker(step))
            self.assertEqual(False, checker(steps[-1]))
            self.assertEqual(expect_fail, checker.is_fail)

    def test_extract_action_and_status(self):
        cls = workerfactory.HEATIntermediateStatusChecker
        # Status strings split on the first underscore only.
        action, status = cls._extract_action_and_status(
            base.Dummy(stack_status='a_b_c'))
        self.assertEqual('a', action)
        self.assertEqual('b_c', status)

    def test_extract_action_and_status_fail(self):
        cls = workerfactory.HEATIntermediateStatusChecker
        # A status without an underscore cannot be decomposed.
        self.assertRaises(
            exc.HEATIntegrationError, cls._extract_action_and_status,
            base.Dummy(stack_status='abc'))

23
requirements.txt Normal file
View File

@ -0,0 +1,23 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
#
pbr!=0.7,<1.0,>=0.6
oslo.config>=1.9.3,<1.10.0 # Apache-2.0
oslo.messaging>1.8.0,<1.9.0 # Apache-2.0
oslo.serialization>=1.4.0,<1.5.0 # Apache-2.0
pecan>=1.0.2
python-heatclient>=0.4.0
python-keystoneclient>=1.3.4
python-glanceclient>=0.17.3
werkzeug>=0.11.5
eventlet!=0.17.0,>=0.16.1
oslo.db>=1.7.0,<1.8.0 # Apache-2.0
oslo.log>=1.0.0,<1.1.0 # Apache-2.0
oslo.utils>=1.4.0,<1.5.0 # Apache-2.0
SQLAlchemy<=0.9.99,>=0.9.7
sqlalchemy-migrate>=0.9.5
lxml>=2.3
PasteDeploy>=1.5.0
enum34>=0.9.23

252
run_tests.sh Executable file
View File

@ -0,0 +1,252 @@
#!/bin/bash
set -eu
# Print the full CLI help text and exit the script.
function usage {
  echo "Usage: $0 [OPTION]..."
  echo "Run ord's test suite(s)"
  echo ""
  echo "  -V, --virtual-env           Always use virtualenv.  Install automatically if not present"
  echo "  -N, --no-virtual-env        Don't use virtualenv.  Run tests in local environment"
  echo "  -s, --no-site-packages      Isolate the virtualenv from the global Python environment"
  echo "  -f, --force                 Force a clean re-build of the virtual environment. Useful when dependencies have been added."
  echo "  -u, --update                Update the virtual environment with any newer package versions"
  echo "  -p, --pep8                  Just run PEP8 and HACKING compliance check"
  echo "  -8, --pep8-only-changed     Just run PEP8 and HACKING compliance check on files changed since HEAD~1"
  echo "  -P, --no-pep8               Don't run static code checks"
  echo "  -c, --coverage              Generate coverage report"
  echo "  -d, --debug                 Run tests with testtools instead of testr. This allows you to use the debugger."
  echo "  -h, --help                  Print this usage message"
  echo "  --hide-elapsed              Don't print the elapsed time for each test along with slow test list"
  echo "  --virtual-env-path <path>   Location of the virtualenv directory"
  echo "                              Default: \$(pwd)"
  echo "  --virtual-env-name <name>   Name of the virtualenv directory"
  echo "                              Default: .venv"
  echo "  --tools-path <dir>          Location of the tools directory"
  echo "                              Default: \$(pwd)"
  echo "  --concurrency <concurrency> How many processes to use when running the tests. A value of 0 autodetects concurrency from your CPU count"
  echo "                              Default: 0"
  echo ""
  echo "Note: with no options specified, the script will try to run the tests in a virtual environment,"
  echo "      If no virtualenv is found, the script will ask if you would like to create one.  If you "
  echo "      prefer to run tests NOT in a virtual environment, simply pass the -N option."
  exit
}
# Parse the command line into the global option flags defined below.
# Walks "$@" positionally; "${!i}" is bash indirect expansion, i.e. the
# value of the i-th positional parameter.
function process_options {
  i=1
  while [ $i -le $# ]; do
    case "${!i}" in
      -h|--help) usage;;
      -V|--virtual-env) always_venv=1; never_venv=0;;
      -N|--no-virtual-env) always_venv=0; never_venv=1;;
      -s|--no-site-packages) no_site_packages=1;;
      -f|--force) force=1;;
      -u|--update) update=1;;
      -p|--pep8) just_pep8=1;;
      -8|--pep8-only-changed) just_pep8_changed=1;;
      -P|--no-pep8) no_pep8=1;;
      -c|--coverage) coverage=1;;
      -d|--debug) debug=1;;
      # Options below consume the next argument as their value.
      --virtual-env-path)
        (( i++ ))
        venv_path=${!i}
        ;;
      --virtual-env-name)
        (( i++ ))
        venv_dir=${!i}
        ;;
      --tools-path)
        (( i++ ))
        tools_path=${!i}
        ;;
      --concurrency)
        (( i++ ))
        concurrency=${!i}
        ;;
      # Unrecognised flags are passed through to testr as options;
      # everything else is collected as testr arguments.
      -*) testropts="$testropts ${!i}";;
      *) testrargs="$testrargs ${!i}"
    esac
    (( i++ ))
  done
}
# --- Defaults (overridable via CLI flags parsed by process_options) -------
# NOTE(review): assigned to tool_path but nothing reads it; --tools-path
# writes tools_path — looks like a typo, confirm before changing.
tool_path=${tools_path:-$(pwd)}
venv_path=${venv_path:-$(pwd)}
# NOTE(review): venv_name is never assigned in this script, so this always
# defaults to .venv; --virtual-env-name writes venv_dir directly — confirm.
venv_dir=${venv_name:-.venv}
with_venv=tools/with_venv.sh
always_venv=0
never_venv=0
force=0
no_site_packages=0
installvenvopts=
testrargs=
testropts=
wrapper=""
just_pep8=0
just_pep8_changed=0
no_pep8=0
coverage=0
debug=0
update=0
concurrency=0

# Pin the locale so test output ordering/parsing is deterministic.
LANG=en_US.UTF-8
LANGUAGE=en_US:en
LC_ALL=C

process_options $@
# Make our paths available to other scripts we call
export venv_path
export venv_dir
# NOTE(review): venv_name and tools_dir are unset here (exporting an unset
# name is a no-op in bash); presumably venv_dir/tools_path were intended —
# verify against tools/with_venv.sh and tools/install_venv.py.
export venv_name
export tools_dir
export venv=${venv_path}/${venv_dir}

if [ $no_site_packages -eq 1 ]; then
  installvenvopts="--no-site-packages"
fi
# Run the unit test suite: testtools.run in --debug mode (debugger
# friendly), testr with colorized subunit output otherwise.  Returns the
# test runner's exit status.
function run_tests {
  # Cleanup *pyc
  ${wrapper} find . -type f -name "*.pyc" -delete

  if [ $debug -eq 1 ]; then
    if [ "$testropts" = "" ] && [ "$testrargs" = "" ]; then
      # Default to running all tests if specific test is not
      # provided.
      testrargs="discover ./ord/tests/unit"
    fi
    ${wrapper} python -m testtools.run $testropts $testrargs

    # Short circuit because all of the testr and coverage stuff
    # below does not make sense when running testtools.run for
    # debugging purposes.
    return $?
  fi

  if [ $coverage -eq 1 ]; then
    TESTRTESTS="$TESTRTESTS --coverage"
  else
    TESTRTESTS="$TESTRTESTS"
  fi

  # Just run the test suites in current environment
  # set +e so a failing test run still reaches copy_subunit_log; the
  # status is captured in RESULT below.
  set +e
  # Strip leading/trailing whitespace from the accumulated testr args.
  testrargs=`echo "$testrargs" | sed -e's/^\s*\(.*\)\s*$/\1/'`
  TESTRTESTS="$TESTRTESTS --testr-args='--subunit --concurrency $concurrency $testropts $testrargs'"
  # Refresh egg metadata when setup.cfg is newer than the generated file.
  if [ setup.cfg -nt ord.egg-info/entry_points.txt ]
  then
    ${wrapper} python setup.py egg_info
  fi
  echo "Running \`${wrapper} $TESTRTESTS\`"
  if ${wrapper} which subunit-2to1 >/dev/null 2>&1
  then
    # subunit-2to1 is present, testr subunit stream should be in version 2
    # format. Convert to version one before colorizing.
    bash -c "${wrapper} $TESTRTESTS | ${wrapper} subunit-2to1 | ${wrapper} tools/colorizer.py"
  else
    bash -c "${wrapper} $TESTRTESTS | ${wrapper} tools/colorizer.py"
  fi
  # NOTE(review): this captures the pipeline's status; with plain bash
  # (no pipefail) that is the colorizer's status, not testr's — confirm.
  RESULT=$?
  set -e

  copy_subunit_log

  if [ $coverage -eq 1 ]; then
    echo "Generating coverage report in covhtml/"
    # Don't compute coverage for common code, which is tested elsewhere
    ${wrapper} coverage combine
    ${wrapper} coverage html --include='ord/*' --omit='ord/openstack/common/*' -d covhtml -i
  fi

  return $RESULT
}
# Copy the most recent testr stream (next-stream - 1) to ./subunit.log.
function copy_subunit_log {
  stream_no=$(cat .testrepository/next-stream)
  latest=".testrepository/$((stream_no - 1))"
  cp "$latest" subunit.log
}
# Warn when flake8 runs outside the venv: the system flake8 may lack the
# OpenStack HACKING plugin, silently skipping those checks.
function warn_on_flake8_without_venv {
  if [ $never_venv -eq 1 ]; then
    echo "**WARNING**:"
    echo "Running flake8 without virtual env may miss OpenStack HACKING detection"
  fi
}
# Run the flake8/HACKING style gate over the whole tree (inside the venv
# when $wrapper is set).
function run_pep8 {
  echo "Running flake8 ..."
  warn_on_flake8_without_venv
  bash -c "${wrapper} flake8"
}
# Base test-runner command; run_tests appends testr arguments to it.
TESTRTESTS="python setup.py testr"

# Decide whether to run inside a virtualenv and set $wrapper accordingly.
if [ $never_venv -eq 0 ]
then
  # Remove the virtual environment if --force used
  if [ $force -eq 1 ]; then
    echo "Cleaning virtualenv..."
    rm -rf ${venv}
  fi
  if [ $update -eq 1 ]; then
    echo "Updating virtualenv..."
    python tools/install_venv.py $installvenvopts
  fi
  if [ -e ${venv} ]; then
    wrapper="${with_venv}"
  else
    if [ $always_venv -eq 1 ]; then
      # Automatically install the virtualenv
      python tools/install_venv.py $installvenvopts
      wrapper="${with_venv}"
    else
      # Interactive fallback: offer to create the venv (default: yes).
      echo -e "No virtual environment found...create one? (Y/n) \c"
      read use_ve
      if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then
        # Install the virtualenv and run the test suite in it
        python tools/install_venv.py $installvenvopts
        wrapper=${with_venv}
      fi
    fi
  fi
fi

# Delete old coverage data from previous runs
if [ $coverage -eq 1 ]; then
  ${wrapper} coverage erase
fi

# -p/--pep8: style check only, skip the test suite entirely.
if [ $just_pep8 -eq 1 ]; then
  run_pep8
  exit
fi

# -8/--pep8-only-changed: style-check only files touched since HEAD~1.
if [ $just_pep8_changed -eq 1 ]; then
  # NOTE(gilliard) We want use flake8 to check the entirety of every file that has
  # a change in it. Unfortunately the --filenames argument to flake8 only accepts
  # file *names* and there are no files named (eg) "ranger-agent/compute/manager.py". The
  # --diff argument behaves surprisingly as well, because although you feed it a
  # diff, it actually checks the file on disk anyway.
  files=$(git diff --name-only HEAD~1 | tr '\n' ' ')
  echo "Running flake8 on ${files}"
  warn_on_flake8_without_venv
  bash -c "diff -u --from-file /dev/null ${files} | ${wrapper} flake8 --diff"
  exit
fi

run_tests

# NOTE(sirp): we only want to run pep8 when we're running the full-test suite,
# not when we're running tests individually. To handle this, we need to
# distinguish between options (testropts), which begin with a '-', and
# arguments (testrargs).
if [ -z "$testrargs" ]; then
  if [ $no_pep8 -eq 0 ]; then
    run_pep8
  fi
fi

68
setup.cfg Normal file
View File

@ -0,0 +1,68 @@
[metadata]
name = ranger-agent
version = 2016.3.0
summary = Ranger Resource Distributor
description-file =
README.rst
author = OpenStack
author-email = openstack-dev@lists.openstack.org
home-page = http://www.openstack.org/
classifier =
Environment :: OpenStack
Intended Audience :: Information Technology
Intended Audience :: System Administrators
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
[global]
setup-hooks =
pbr.hooks.setup_hook
[files]
packages = ord
data_files =
/etc/ord =
etc/ord.conf
etc/api-paste.ini
[entry_points]
console_scripts=
ord-api = ord.cmd.api:main
ord-engine = ord.cmd.engine:main
ord-db-manage = ord.db.sqlalchemy.impl_sqlalchemy:Connection
ord-dbsync = ord.cmd.manage:dbsync
ord-fake-rds-listener = ord.cmd.fake_rds_listener:main
[build_sphinx]
source-dir = doc/source
build-dir = doc/build
all_files = 1
[upload_sphinx]
upload-dir = doc/build/html
[compile_catalog]
directory = ord/locale
domain = ord
[update_catalog]
domain = ord
output_dir = ord/locale
input_file = ord/locale/ord.pot
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = ord/locale/ord.pot
[wheel]
universal = 1
[pbr]
autodoc_index_modules = 1
warnerrors = true

Some files were not shown because too many files have changed in this diff Show More