integration: parse results to canonical jUnit

At the end of the result-collection playbook, run a small Python script over
the collected artifacts, converting and combining them into a single
authoritative results file for CI-automation consumption. Where possible, add
identifying details about the source of individual results. This extra layer
of processing also makes it possible to alter the meaning of PASS/SKIP/FAIL
with respect to specific ongoing testing needs.

Signed-off-by: Chris Evich <cevich@redhat.com>
This commit is contained in:

parent f1817ab2aa
commit b738a361c7

6 changed files with 451 additions and 48 deletions
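The heart of the change is the new ``parse2junit.py`` (full listing below). As
a minimal sketch of the combining step it performs — using only ``junitparser``
1.0.0 calls that also appear in the listing; file names and the results name
here are illustrative, not the commit's:

```python
#!/usr/bin/env python2
# Sketch: merge an existing jUnit XML file with a hand-built suite into one
# canonical document, then write it out as a single results file.
import junitparser

combined = junitparser.JUnitXml('CRI-O Pull Request 1234')  # illustrative name

# Re-use suites already in jUnit form (e.g. a node E2E junit_01.xml)
for suite in junitparser.JUnitXml.fromfile('junit_01.xml'):
    combined.add_testsuite(suite)

# Build a suite by hand, e.g. from integration TAP output
suite = junitparser.TestSuite('CRI-O Integration suite')
case = junitparser.TestCase('[CRI-O] [integration] #1 example test')
case.result = junitparser.Skipped(message='(demo)')
suite.add_testcase(case)
combined.add_testsuite(suite)

combined.write('combined_junit.xml')  # single authoritative results file
```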
contrib/test/integration/README.md
@@ -2,7 +2,7 @@

 This directory contains playbooks to set up for and run the integration and
 end-to-end tests for CRI-O on RHEL and Fedora hosts. The expected entry-point
-is the ``main.yml`` Ansible playbook.
+is the ``main.yml``.

 ##Definitions:

@@ -53,6 +53,21 @@ Execution of the ``main.yml`` playbook:

   other tags. Must build CRI-O from source and run Kubernetes node
   E2E tests.

+Execution of the ``results.yml`` playbook:
+
+- Assumes 'setup' previously completed successfully.
+- Either ``integration``, ``e2e``, or other testing steps
+  must have completed (even if in failure).
+- Must be the authoritative collector and producer of results for the run,
+  whether or not the control-host is the subject.
+- Must gather all important/relevant artifacts into a central location.
+- Must not duplicate, rename, or obfuscate any other results or artifact files
+  from this run or any others. Must not fail due to missing files or failed
+  commands.
+- May add test-run identification details so long as they don't interfere with
+  downstream processing or any of the above requirements.
+- Must be executed using the ``venv-ansible-playbook.sh`` wrapper (because of
+  the ``junitparser`` requirement).

 ``cri-o/contrib/test/venv-ansible-playbook.sh`` Wrapper:

 - May be executed on the control-host to both hide and version-lock playbook
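For example (assuming the wrapper forwards its arguments straight through to
``ansible-playbook`` inside the virtualenv), a results run from
``cri-o/contrib/test/`` might look like
``./venv-ansible-playbook.sh -i my_inventory integration/results.yml``; the
inventory and extra options are site-specific.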
contrib/test/integration/results.yml
@@ -4,59 +4,96 @@

 - hosts: '{{ subjects | default("all") }}'
   vars_files:
     - "{{ playbook_dir }}/vars.yml"
-  vars:
-    _result_filepaths: []  # do not use
-    _dstfnbuff: []  # do not use
   environment: '{{ environment_variables }}'
   tasks:
-    - name: The crio_integration_filepath is required
-      tags:
-        - integration
-      set_fact:
-        _result_filepaths: "{{ _result_filepaths + [crio_integration_filepath] }}"
-
-    - name: The crio_node_e2e_filepath is required
-      tags:
-        - e2e
-      set_fact:
-        _result_filepaths: "{{ _result_filepaths + [crio_node_e2e_filepath] }}"

     - name: Verify expectations
       assert:
         that:
-          - 'result_dest_basedir | default(False, True)'
-          - '_result_filepaths | default(False, True)'
-          - '_dstfnbuff == []'
-          - 'results_fetched is undefined'
+          # Combined "is defined" and "isn't blank" check
+          - 'artifacts | default("", True) | trim | length'
+          - 'generated_artifacts | default("", True) | trim | length'
+          - 'extra_artifact_filepaths is defined'
+          - 'parsed_artifacts is defined'
+          - 'canonical_junit is defined'
+          - 'playbook_dir ~ "/../parse2junit.py" | is_file'

-    - name: Results directory exists
+    - name: artifacts directory exists
       file:
-        path: "{{ result_dest_basedir }}"
+        path: "{{ artifacts }}"
         state: directory
-      delegate_to: localhost

-    - name: destination file paths are buffered for overwrite-checking and jUnit conversion
-      set_fact:
-        _dstfnbuff: >
-            {{ _dstfnbuff |
-               union( [result_dest_basedir ~ "/" ~ inventory_hostname ~ "/" ~ item | basename] ) }}
-      with_items: '{{ _result_filepaths }}'
+    - name: Extra artifacts are collected, except missing or with clashing filenames
+      command: 'cp --no-clobber --verbose "{{ item }}" "{{ artifacts }}/"'
+      ignore_errors: True
+      with_items: '{{ extra_artifact_filepaths }}'

-    - name: Overwriting existing results assumed very very bad
-      fail:
-        msg: "Cowardly refusing to overwrite {{ item }}"
-      when: item | exists
-      delegate_to: localhost
-      with_items: '{{ _dstfnbuff }}'
+    - name: Generated artifacts directory exists
+      file:
+        path: "{{ artifacts }}/generated"
+        state: directory

-    # fetch module doesn't support directories
-    - name: Retrieve results from all hosts
+    - name: Generated artifacts are produced
+      shell: '( {{ item.value }} ) > {{ item.key }} 2>&1 || true'
+      args:
+        chdir: "{{ artifacts }}/generated"
+        creates: "{{ artifacts }}/generated/{{ item.key }}"
+      ignore_errors: True
+      with_dict: "{{ generated_artifacts }}"
+
+    - name: Subject produces a single canonical jUnit file by combining parsed_artifacts
+      script: '{{ playbook_dir }}/../parse2junit.py {{ parsed_artifacts | join(" ") }} "{{ canonical_junit }}"'
+      args:
+        chdir: "{{ artifacts }}"
+
+
+- hosts: '{{ control_host | default("none") }}'
+  vars_files:
+    - "{{ playbook_dir }}/vars.yml"
+  environment: '{{ environment_variables }}'
+  tasks:
+
+    - name: Verify expectations
+      assert:
+        that:
+          # Combined "is defined" and "isn't blank" check
+          - 'artifacts | default("", True) | trim | length'
+          - 'canonical_junit is defined'
+          - 'playbook_dir ~ "/../parse2junit.py" | is_file'
+
+    - name: A subdirectory exists for this subject's artifacts
+      file:
+        path: "{{ collection_dirpath }}"
+        state: directory
+
+    - name: Artifacts are retrieved from subjects
+      synchronize:
-        checksum: True  # Don't rely on date/time being in sync
         archive: False  # Don't bother with permissions or times
+        checksum: True  # Don't rely on date/time being in sync
         copy_links: True  # We want files, not links to files
         recursive: True
         mode: pull
-        dest: '{{ result_dest_basedir }}/{{ inventory_hostname }}/'  # must end in /
-        src: '{{ item }}'
-      register: results_fetched
-      with_items: '{{ _result_filepaths }}'
+        dest: '{{ collection_dirpath }}'
+        src: '{{ artifacts }}'
+        rsync_opts: '--ignore-missing-args'
+      delegate_to: '{{ item }}'
+      with_inventory_hostnames:
+        - '{{ subjects | default("all:!localhost") }}'
+
+    - name: The paths of canonical_junit files from all subjects are found
+      find:
+        paths:
+          - '{{ collection_dirpath }}'
+        patterns: "{{ canonical_junit | basename }}"
+        recurse: True
+      register: result
+
+    - name: Found paths are joined together into a single string
+      set_fact:
+        result: '{{ result.files | map(attribute="path") | join(" ") }}'
+
+    - name: The control host produces a top-level junit, combining all subject's canonical_junits
+      script: '{{ playbook_dir }}/../parse2junit.py {{ result }} "./{{ canonical_junit | basename }}"'
+      args:
+        chdir: "{{ collection_dirpath }}"
+      when: result | trim | length
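The second play finds every per-subject ``canonical_junit`` under the
collection directory and feeds the list back through ``parse2junit.py``. The
same find-and-merge step, sketched in plain Python (paths are illustrative; it
uses only ``junitparser`` calls shown elsewhere in this commit):

```python
# Sketch of the control-host merge: locate each subject's junit_01.xml
# under the collection directory, then combine them into one top-level file.
import os
import fnmatch
import junitparser

collection_dirpath = 'artifacts'      # illustrative
canonical_basename = 'junit_01.xml'   # matches canonical_junit in vars.yml

found = []
for root, _dirs, files in os.walk(collection_dirpath):
    for name in fnmatch.filter(files, canonical_basename):
        found.append(os.path.join(root, name))

top = junitparser.JUnitXml('top-level results')
for path in found:
    for suite in junitparser.JUnitXml.fromfile(path):
        top.add_testsuite(suite)
top.write(os.path.join(collection_dirpath, canonical_basename))
```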
contrib/test/integration/vars.yml
@@ -23,7 +23,39 @@ cri_o_dest_path: "{{ go_path }}/src/github.com/kubernetes-incubator/cri-o"

+# For results.yml.  Paths use rsync 'source' conventions.
+artifacts: "/tmp/artifacts"  # Base-directory for collection
+crio_integration_filepath: "{{ artifacts }}/testout.txt"
+crio_node_e2e_filepath: "{{ artifacts }}/junit_01.xml"
-result_dest_basedir: '{{ lookup("env","WORKSPACE") |
-                         default(playbook_dir, True) }}/artifacts'
+
+# List of absolute paths to extra filenames to collect into {{ artifacts }}.
+# Non-existing files and any name-collisions will be skipped.
+extra_artifact_filepaths:
+    - "/go/src/k8s.io/kubernetes/e2e.log"
+    - "/tmp/kubelet.log"
+    - "/tmp/kube-apiserver.log"
+    - "/tmp/kube-controller-manager.log"
+    - "/tmp/kube-proxy.log"
+    - "/tmp/kube-proxy.yaml"
+    - "/tmp/kube-scheduler.log"
+
+# Mapping of generated artifact filenames and their commands.  All
+# are relative to {{ artifacts }}/generated/
+generated_artifacts:
+    installed_packages.log: '$(type -P dnf || type -P yum) list installed'
+    avc_denials.log: 'ausearch -m AVC -m SELINUX_ERR -m USER_AVC'
+    filesystem.info: 'df -h && sudo pvs && sudo vgs && sudo lvs'
+    pid1.journal: 'journalctl _PID=1 --no-pager --all --lines=all'
+    crio.service: 'journalctl --unit crio.service --no-pager --all --lines=all'
+    customcluster.service: 'journalctl --unit customcluster.service --no-pager --all --lines=all'
+    systemd-journald.service: 'journalctl --unit systemd-journald.service --no-pager --all --lines=all'
+
+# Use ``parse2junit.py`` on these artifact files
+# to produce the '{{ canonical_junit }}' file.
+parsed_artifacts:
+    - "./testout.txt"
+    - "./junit_01.xml"
+
+# jUnit artifact file for ``parse2junit.py`` output
+canonical_junit: "./junit_01.xml"
+
+# When subject != localhost, synchronize "{{ artifacts }}" from
+# all subjects into this directory on the control-host.
+collection_dirpath: '{{ lookup("env","WORKSPACE") |
+                        default(playbook_dir, True) }}/artifacts/{{ inventory_hostname }}'
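Each ``generated_artifacts`` entry is a filename/command pair consumed by the
"Generated artifacts are produced" task on the subject. A rough Python
equivalent of that task, for clarity (one illustrative entry; the task's
``creates:`` and ``ignore_errors:`` semantics are approximated):

```python
# Sketch: produce "generated" artifacts from a filename -> command mapping.
import os
import subprocess

generated_artifacts = {
    'installed_packages.log': '$(type -P dnf || type -P yum) list installed',
}

for filename, command in generated_artifacts.items():
    if os.path.exists(filename):      # mirrors the task's 'creates:' guard
        continue
    with open(filename, 'w') as output:
        # shell=True so the $(...) substitution works; failures are
        # tolerated, mirroring the task's 'ignore_errors: True'
        subprocess.call(command, shell=True,
                        stdout=output, stderr=subprocess.STDOUT)
```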
contrib/test/parse2junit.py (new executable file, 313 lines)
@@ -0,0 +1,313 @@
#!/usr/bin/env python2

# encoding: utf-8

# N/B: Assumes script was called from cri-o repository on the test subject,
#      with a remote name of 'origin'.  It's executing under the results.yml
#      playbook, which in turn was executed by venv-ansible-playbook.sh,
#      i.e. everything in requirements.txt is already available.
#
# Also Requires:
#    python 2.7+
#    git

import os
import sys
import argparse
import re
import contextlib
import uuid
from socket import gethostname
import subprocess
from tempfile import NamedTemporaryFile
# Ref: https://github.com/gastlygem/junitparser
import junitparser

# Parser function suffixes and regex patterns of supported input filenames
TEST_TYPE_FILE_RE = dict(integration=re.compile(r'testout\.txt'),
                         e2e=re.compile(r'junit_\d+\.xml'))
INTEGRATION_TEST_COUNT_RE = re.compile(r'^(?P<start>\d+)\.\.(?P<end>\d+)')
INTEGRATION_SKIP_RE = re.compile(r'^(?P<stat>ok|not ok) (?P<tno>\d+) # skip'
                                 r' (?P<sreason>\(.+\)) (?P<desc>.+)')
INTEGRATION_RESULT_RE = re.compile(r'^(?P<stat>ok|not ok) (?P<tno>\d+) (?P<desc>.+)')


def d(msg):
    # Debug messages go to stderr so they never pollute stdout XML output
    if msg:
        try:
            sys.stderr.write('{}\n'.format(msg))
            sys.stderr.flush()
        except IOError:
            pass


@contextlib.contextmanager
def if_match(line, regex):
    # __enter__
    match = regex.search(line)
    if match:
        yield match
    else:
        yield None
    # __exit__
    pass  # Do nothing


def if_case_add(suite, line_parser, *parser_args, **parser_dargs):
    case = line_parser(*parser_args, **parser_dargs)
    if case:
        suite.add_testcase(case)


def parse_integration_line(line, classname):
    name_fmt = "[CRI-O] [integration] #{} {}"
    with if_match(line, INTEGRATION_SKIP_RE) as match:
        if match:
            name = name_fmt.format(match.group('tno'), match.group('desc'))
            case = junitparser.TestCase(name)
            case.classname = classname
            case.result = junitparser.Skipped(message=match.group('sreason'))
            case.system_err = match.group('stat')
            return case
    with if_match(line, INTEGRATION_RESULT_RE) as match:
        if match:
            name = name_fmt.format(match.group('tno'), match.group('desc'))
            case = junitparser.TestCase(name)
            case.classname = classname
            case.system_err = match.group('stat')
            if match.group('stat') == 'not ok':
                # Can't think of anything better to put here
                case.result = junitparser.Failed('not ok')
            elif not match.group('stat') == 'ok':
                case.result = junitparser.Error(match.group('stat'))
            return case
    return None


# N/B: name suffix corresponds to key in TEST_TYPE_FILE_RE
def parse_integration(input_file_path, hostname):
    suite = junitparser.TestSuite('CRI-O Integration suite')
    suite.hostname = hostname
    suite_stdout = []
    classname = 'CRI-O integration suite'
    n_tests = -1  # No tests ran
    d("    Processing integration results for {}".format(suite.hostname))
    with open(input_file_path) as testout_txt:
        for line in testout_txt:
            line = line.strip()
            suite_stdout.append(line)  # Basically a copy of the file
            # n_tests must come first
            with if_match(line, INTEGRATION_TEST_COUNT_RE) as match:
                if match:
                    n_tests = int(match.group('end')) - int(match.group('start')) + 1
                    d("    Collecting results from {} tests".format(n_tests))
                    break
        if n_tests > 0:
            for line in testout_txt:
                line = line.strip()
                suite_stdout.append(line)
                if_case_add(suite, parse_integration_line,
                            line=line, classname=classname)
        else:
            d("    Uh oh, no results found, skipping.")
            return None
    # TODO: No date/time recorded in file
    # stat = os.stat(input_file_path)
    # test_start = stat.st_mtime
    # test_end = stat.st_atime
    # duration = test_end - test_start
    suite.time = 0
    suite.add_property('stdout', '\n'.join(suite_stdout))

    d("    Parsed {} integration test cases".format(len(suite)))
    return suite


def flatten_testsuites(testsuites):
    # The jUnit format allows nesting testsuites, squash into a list for simplicity
    if testsuites is None:
        return []  # a parser gave up on its input file
    if isinstance(testsuites, junitparser.TestSuite):
        testsuite = testsuites  # for clarity
        return [testsuite]
    result = []
    for testsuite in testsuites:
        if isinstance(testsuite, junitparser.TestSuite):
            result.append(testsuite)
        elif isinstance(testsuite, junitparser.JUnitXml):
            nested_suites = flatten_testsuites(testsuite)
            if nested_suites:
                result += nested_suites
    return result


def find_k8s_e2e_suite(testsuites):
    testsuites = flatten_testsuites(testsuites)
    for testsuite in testsuites:
        if testsuite.name and 'Kubernetes e2e' in testsuite.name:
            return testsuite
        # Name could be None or wrong, check classnames of all tests
        classnames = ['Kubernetes e2e' in (x.classname or '') for x in testsuite]
        if classnames and all(classnames):
            return testsuite
    return None


# N/B: name suffix corresponds to key in TEST_TYPE_FILE_RE
def parse_e2e(input_file_path, hostname):
    # Load junit_xx.xml file, update contents with more identifying info.
    try:
        testsuites = junitparser.JUnitXml.fromfile(input_file_path)
        suite = find_k8s_e2e_suite(testsuites)
    except junitparser.JUnitXmlError as xcept:
        d("    Error parsing {}, skipping it: {}".format(input_file_path, xcept))
        return None
    if not suite:
        d("    Failed to find any e2e results in {}".format(input_file_path))
        return None
    if not suite.hostname:
        suite.hostname = hostname
    if not suite.name:
        suite.name = 'Kubernetes e2e suite'
    d("    Processing e2e results for {}".format(suite.hostname))
    for testcase in suite:
        if not testcase.classname:
            d("    Adding missing classname to case {}".format(testcase.name))
            testcase.classname = "Kubernetes e2e suite"
    d("    Parsed {} e2e test cases".format(len(suite)))
    if not suite.time:
        stat = os.stat(input_file_path)
        test_start = stat.st_ctime
        test_end = stat.st_mtime
        duration = test_end - test_start
        if duration:
            suite.time = duration
    return testsuites  # Retain original structure


def parse_test_output(ifps, results_name, hostname):
    time_total = 0
    testsuites = junitparser.JUnitXml(results_name)
    # Cheat, lookup parser function name suffix from global namespace
    _globals = globals()
    for input_file_path in ifps:
        if not os.path.isfile(input_file_path):
            d("    The file {} doesn't appear to exist, skipping it.".format(input_file_path))
            continue
        parser = None
        for tname, regex in TEST_TYPE_FILE_RE.items():
            if regex.search(input_file_path):
                parser = _globals.get('parse_{}'.format(tname))
                break
        else:
            d("    Could not find parser to handle input"
              " file {}, skipping.".format(input_file_path))
            continue

        d("  Parsing {} using {}".format(input_file_path, parser))
        for parsed_testsuite in flatten_testsuites(parser(input_file_path, hostname)):
            d("    Adding {} suite for {}".format(parsed_testsuite.name, parsed_testsuite.hostname))
            testsuites.add_testsuite(parsed_testsuite)
            if parsed_testsuite.time:
                time_total += parsed_testsuite.time
    testsuites.time = time_total
    return testsuites


def make_host_name():
    subject = '{}'.format(gethostname())
    # Origin-CI doesn't use very distinguishable hostnames :(
    if 'openshiftdevel' in subject or 'ip-' in subject:
        try:
            with open('/etc/machine-id') as machineid:
                subject = 'machine-id-{}'.format(machineid.read().strip())
        except IOError:  # Worst-case, but we gotta pick sumpfin
            subject = 'uuid-{}'.format(uuid.uuid4())
    return subject


def make_results_name(argv):
    script_dir = os.path.dirname(argv[0])
    spco = lambda cmd: subprocess.check_output(cmd.split(' '),
                                               stderr=subprocess.STDOUT,
                                               close_fds=True,
                                               cwd=script_dir,
                                               universal_newlines=True)
    pr_no = None
    head_id = None
    try:
        head_id = spco('git rev-parse HEAD').strip()
        for line in spco('git ls-remote origin refs/pull/[0-9]*/head').strip().splitlines():
            cid, ref = line.strip().split(None, 1)
            if head_id in cid:
                pr_no = ref.strip().split('/')[2]
                break
    except subprocess.CalledProcessError:
        pass

    if pr_no:
        return "CRI-O Pull Request {}".format(pr_no)
    elif head_id:
        return "CRI-O Commit {}".format(head_id[:8])
    else:  # Worst-case, but we gotta pick sumpfin
        return "CRI-O Run ID {}".format(uuid.uuid4())


def main(argv):
    reload(sys)
    sys.setdefaultencoding('utf8')
    parser = argparse.ArgumentParser(epilog='Note: The parent directory of input files is'
                                            ' assumed to be the test suite name')
    parser.add_argument('-f', '--fqdn',
                        help="Alternative hostname to add to results if none present",
                        default=make_host_name())
    parser.add_argument('-b', '--backup', action="store_true",
                        help="If output file name matches any input file, backup with"
                             " 'original_' prefix",
                        default=False)
    parser.add_argument('ifps', nargs='+',
                        help='Input file paths to test output'
                             ' from {}.'.format(TEST_TYPE_FILE_RE.keys()))
    parser.add_argument('ofp', nargs=1,
                        default='-',
                        help='Output file path for jUnit XML, or "-" for stdout')
    options = parser.parse_args(argv[1:])
    ofp = options.ofp[0]  # nargs==1 still puts it into a list
    results_name = make_results_name(argv)

    d("Parsing {} to {}".format(options.ifps, ofp))
    d("Using results name: {} and hostname {}".format(results_name, options.fqdn))
    # Parse all results
    new_testsuites = parse_test_output(options.ifps, results_name, options.fqdn)

    if not len(new_testsuites):
        d("Uh oh, doesn't look like anything was processed.  Bailing out")
        return None

    d("Parsed {} suites".format(len(new_testsuites)))

    # etree can't handle files w/o filenames :(
    tmp = NamedTemporaryFile(suffix='.tmp', prefix=results_name, bufsize=1)
    new_testsuites.write(tmp.name)
    tmp.seek(0)
    del new_testsuites  # close up any open files
    if ofp == '-':
        sys.stdout.write('\n{}\n'.format(tmp.read()))
    else:
        for ifp in options.ifps:
            if not os.path.isfile(ofp):
                break
            if os.path.samefile(ifp, ofp):
                if not options.backup:
                    d("Warning: {} will be combined with other input files."
                      "".format(ofp))
                    break
                dirname = os.path.dirname(ofp)
                basename = os.path.basename(ofp)
                origname = 'original_{}'.format(basename)
                os.rename(ofp, os.path.join(dirname, origname))
                break
        with open(ofp, 'w', 1) as output_file:
            output_file.truncate(0)
            output_file.flush()
            d("Writing {}".format(ofp))
            output_file.write(tmp.read())


if __name__ == '__main__':
    main(sys.argv)
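The integration parser above keys entirely off three regular expressions run
against the TAP-style lines in ``testout.txt``. A quick demonstration of what
they extract (the sample lines are illustrative, not taken from a real run):

```python
# Demonstrate the testout.txt patterns used by parse_integration() above.
import re

TEST_COUNT = re.compile(r'^(?P<start>\d+)\.\.(?P<end>\d+)')
SKIP = re.compile(r'^(?P<stat>ok|not ok) (?P<tno>\d+) # skip'
                  r' (?P<sreason>\(.+\)) (?P<desc>.+)')
RESULT = re.compile(r'^(?P<stat>ok|not ok) (?P<tno>\d+) (?P<desc>.+)')

print(TEST_COUNT.match('1..150').group('start', 'end'))  # ('1', '150')

m = SKIP.match('ok 3 # skip (no loopback) ctr seccomp overrides unconfined')
print(m.group('tno', 'sreason'))                         # ('3', '(no loopback)')

m = RESULT.match('not ok 7 ctr execsync failure')
print(m.group('stat', 'tno', 'desc'))  # ('not ok', '7', 'ctr execsync failure')
```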
contrib/test/requirements.txt
@@ -52,3 +52,7 @@ virtualenv==15.1.0 --hash=sha256:39d88b533b422825d644087a21e78c45cf5af0ef7a99a1f

 --hash=sha256:02f8102c2436bb03b3ee6dede1919d1dac8a427541652e5ec95171ec8adbc93a

 pip==9.0.1 --hash=sha256:690b762c0a8460c303c089d5d0be034fb15a5ea2b75bdf565f40421f542fefb0

+future==0.16.0 --hash=sha256:e39ced1ab767b5936646cedba8bcce582398233d6a627067d4c6a454c90cfedb
+
+junitparser==1.0.0 --hash=sha256:5b0f0ffeef3548878b5ae2cac40b5b128ae18337e2a260a8265f5519b52c907c
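Because every requirement carries a hash, an install such as
``pip install --require-hashes -r requirements.txt`` (pip switches to
hash-checking mode automatically once any hash is present) pins the wrapper's
virtualenv to exactly the ``junitparser`` release the new parser was written
against.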
contrib/test/venv-ansible-playbook.sh
@@ -13,8 +13,9 @@

 # All errors are fatal
 set -e

-SCRIPT_PATH=`realpath $(dirname $0)`
-REQUIREMENTS="$SCRIPT_PATH/requirements.txt"
+export SCRIPT_PATH=`realpath $(dirname $0)`
+export REQUIREMENTS="$SCRIPT_PATH/requirements.txt"
+export ANSIBLE_CONFIG="$SCRIPT_PATH/integration/ansible.cfg"

 echo

@@ -47,7 +48,8 @@ else
 fi

 # Create a directory to contain logs and test artifacts
-export ARTIFACTS=$(mkdir -pv $WORKSPACE/artifacts | tail -1 | cut -d \' -f 2)
+[ -n "$ARTIFACTS" ] || export ARTIFACTS="$WORKSPACE/artifacts"
+[ -d "$ARTIFACTS" ] || mkdir -pv "$ARTIFACTS"
+[ -d "$ARTIFACTS" ] || exit 3

 # All command failures from now on are fatal
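Worth noting: the replaced line recovered the directory name by parsing
``mkdir -pv`` output, which prints nothing when the directory already exists
and so left ``$ARTIFACTS`` empty on re-runs; the three explicit checks above
avoid that failure mode and abort with status 3 if the directory still cannot
be created.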