Mirror of https://github.com/optim-enterprises-bv/kubernetes.git, synced 2025-11-03 19:58:17 +00:00.
Merge pull request #24998 from spxtr/remove-test-history
Automatic merge from submit-queue. Move test-history code into the test-infra repository. ~~Waiting on https://github.com/kubernetes/test-infra/pull/3~~ Ready to go.
@@ -1,38 +0,0 @@
#!/bin/bash

# Copyright 2016 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Compiles a static HTML site containing the last day's worth of test results.
# Pass the URL of Jenkins into $1

set -o errexit
set -o nounset

readonly jenkins="$1"
readonly datestr=$(date +"%Y-%m-%d")

# Create JSON report
time python gen_json.py \
  "--server=${jenkins}" \
  "--match=^kubernetes|kubernetes-build|kubelet-gce-e2e-ci"

# Create static HTML reports out of the JSON
python gen_html.py --output-dir=static --input=tests.json

# Upload to GCS
readonly bucket="kubernetes-test-history"
readonly gcs_acl="public-read"
gsutil -q cp -a "${gcs_acl}" -z json "tests.json" "gs://${bucket}/logs/${datestr}.json"
gsutil -q cp -ra "${gcs_acl}" "static" "gs://${bucket}/"
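
For context, the tests.json handed from gen_json.py to gen_html.py maps test names to job names to per-build records. A minimal sketch of that shape; the field names come from the deleted gen_json.py and from the TEST_DATA fixture further down, while the test/job names and values here are purely illustrative:

# Illustrative only: shape of tests.json as built by the deleted gen_json.py
# (tests[test_name][job_name] is a list of per-build records).
import json

example = {
    'example test name': {
        'kubernetes-e2e-gce': [
            {'build': 1234, 'failed': False, 'time': 3.52},   # duration in seconds
            {'build': 1235, 'failed': True, 'time': 63.21},
        ],
    },
}

with open('tests.json', 'w') as buf:
    json.dump(example, buf, sort_keys=True)
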
@@ -1,285 +0,0 @@
#!/usr/bin/env python

# Copyright 2016 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Creates an HTML report for all jobs starting with a given prefix.

Reads the JSON produced by gen_json.py and writes the HTML pages to the
output directory.

This code is pretty nasty, but gets the job done.

It would be really spiffy if this used an HTML template system, but for now
we're old-fashioned. We could also generate these with JS, directly from the
JSON. That would allow custom filtering and stuff like that.
"""

from __future__ import print_function

import argparse
import cgi
import collections
import json
import os
import string
import sys
import time


TestMetadata = collections.namedtuple('TestMetadata', [
    'okay',
    'unstable',
    'failed',
    'skipped',
])


def gen_tests(data, prefix, exact_match):
    """Creates the HTML for all test cases.

    Args:
        data: Parsed JSON data that was created by gen_json.py.
        prefix: Considers Jenkins jobs that start with this.
        exact_match: Only match Jenkins jobs with name equal to prefix.

    Returns:
        (html, TestMetadata) for matching tests
    """
    html = ['<ul class="test">']
    totals = collections.defaultdict(int)
    for test in sorted(data, key=string.lower):
        test_html = ['<ul class="suite">']
        has_test = False
        has_failed = False
        has_unstable = False
        for suite in sorted(data[test]):
            if not suite.startswith(prefix):
                continue
            if exact_match and suite != prefix:
                continue
            has_test = True
            num_failed = 0
            num_builds = 0
            total_time = 0
            for build in data[test][suite]:
                num_builds += 1
                if build['failed']:
                    num_failed += 1
                total_time += build['time']
            avg_time = total_time / num_builds
            unit = 's'
            if avg_time > 60:
                avg_time /= 60
                unit = 'm'
            if num_failed == num_builds:
                has_failed = True
                status = 'failed'
            elif num_failed > 0:
                has_unstable = True
                status = 'unstable'
            else:
                status = 'okay'
            test_html.append('<li class="suite">')
            test_html.append('<span class="%s">%d/%d</span>' % (
                status, num_builds - num_failed, num_builds))
            test_html.append(
                '<span class="time">%.0f%s</span>' % (avg_time, unit))
            test_html.append(suite)
            test_html.append('</li>')
        test_html.append('</ul>')
        if has_failed:
            status = 'failed'
        elif has_unstable:
            status = 'unstable'
        elif has_test:
            status = 'okay'
        else:
            status = 'skipped'
        totals[status] += 1
        html.append('<li class="test %s">' % status)
        if exact_match and len(test_html) > 2:
            if not (test_html[2].startswith('<span') and
                    test_html[3].startswith('<span')):
                raise ValueError(
                    'couldn\'t extract suite results for prepending')
            html.extend(test_html[2:4])
            html.append(test)
        else:
            html.append(test)
            html.extend(test_html)
        html.append('</li>')
    html.append('</ul>')
    return '\n'.join(html), TestMetadata(
        totals['okay'], totals['unstable'], totals['failed'], totals['skipped'])


def html_header(title, script):
    """Return html header items."""
    html = ['<html>', '<head>']
    html.append('<link rel="stylesheet" type="text/css" href="style.css" />')
    if title:
        html.append('<title>%s</title>' % cgi.escape(title))
    if script:
        html.append('<script src="script.js"></script>')
    html.append('</head>')
    html.append('<body>')
    return html


def gen_html(data, prefix, exact_match=False):
    """Creates the HTML for the entire page.

    Args:
        Same as gen_tests.
    Returns:
        Same as gen_tests.
    """
    tests_html, meta = gen_tests(data, prefix, exact_match)
    if exact_match:
        msg = 'Suite %s' % cgi.escape(prefix)
    elif prefix:
        msg = 'Suites starting with %s' % cgi.escape(prefix)
    else:
        msg = 'All suites'
    html = html_header(title=msg, script=True)
    html.append('<div id="header">%s:' % msg)
    fmt = '<span class="total %s" onclick="toggle(\'%s\');">%s</span>'
    html.append(fmt % ('okay', 'okay', meta.okay))
    html.append(fmt % ('unstable', 'unstable', meta.unstable))
    html.append(fmt % ('failed', 'failed', meta.failed))
    html.append(fmt % ('skipped', 'skipped', meta.skipped))
    html.append('</div>')
    html.append(tests_html)
    html.append('</body>')
    html.append('</html>')
    return '\n'.join(html), meta


def gen_metadata_links(suites):
    """Write clickable pass, unstable, failed stats."""
    html = []
    for (name, target), meta in sorted(suites.iteritems()):
        html.append('<a class="suite-link" href="%s">' % target)
        html.append('<span class="total okay">%d</span>' % meta.okay)
        html.append('<span class="total unstable">%d</span>' % meta.unstable)
        html.append('<span class="total failed">%d</span>' % meta.failed)
        html.append(name)
        html.append('</a>')
    return html


def write_html(outdir, path, html):
    """Write html to outdir/path."""
    with open(os.path.join(outdir, path), 'w') as buf:
        buf.write(html)


def write_metadata(infile, outdir):
    """Writes tests-*.html and suite-*.html files.

    Args:
      infile: the json file created by gen_json.py
      outdir: a path to write the html files.
    """
    with open(infile) as buf:
        data = json.load(buf)

    prefix_metadata = {}
    prefixes = [
        'kubernetes',
        'kubernetes-e2e',
        'kubernetes-soak',
        'kubernetes-e2e-gce',
        'kubernetes-e2e-gke',
        'kubernetes-upgrade',
    ]
    for prefix in prefixes:
        path = 'tests-%s.html' % prefix
        html, metadata = gen_html(data, prefix, False)
        write_html(outdir, path, html)
        prefix_metadata[prefix or 'kubernetes', path] = metadata

    suite_metadata = {}
    suites = set()
    for suite_names in data.values():
        suites.update(suite_names.keys())
    for suite in sorted(suites):
        path = 'suite-%s.html' % suite
        html, metadata = gen_html(data, suite, True)
        write_html(outdir, path, html)
        suite_metadata[suite, path] = metadata

    blocking = {
        'kubelet-gce-e2e-ci',
        'kubernetes-build',
        'kubernetes-e2e-gce',
        'kubernetes-e2e-gce-scalability',
        'kubernetes-e2e-gce-slow',
        'kubernetes-e2e-gke',
        'kubernetes-e2e-gke-slow',
        'kubernetes-kubemark-5-gce',
        'kubernetes-kubemark-500-gce',
        'kubernetes-test-go',
    }
    blocking_suite_metadata = {
        k: v for (k, v) in suite_metadata.items() if k[0] in blocking}

    return prefix_metadata, suite_metadata, blocking_suite_metadata


def write_index(outdir, prefixes, suites, blockers):
    """Write the index.html with links to each view, including stat summaries.

    Args:
      outdir: the path to write the index.html file
      prefixes: the {(prefix, path): TestMetadata} map
      suites: the {(suite, path): TestMetadata} map
      blockers: the {(suite, path): TestMetadata} map of blocking suites
    """
    html = html_header(title='Kubernetes Test Summary', script=False)
    html.append('<h1>Kubernetes Tests</h1>')
    html.append('Last updated %s' % time.strftime('%F'))

    html.append('<h2>Tests from suites starting with:</h2>')
    html.extend(gen_metadata_links(prefixes))

    html.append('<h2>Blocking suites:</h2>')
    html.extend(gen_metadata_links(blockers))

    html.append('<h2>All suites:</h2>')
    html.extend(gen_metadata_links(suites))

    html.extend(['</body>', '</html>'])
    write_html(outdir, 'index.html', '\n'.join(html))


def main(infile, outdir):
    """Use infile to write test, suite and index html files to outdir."""
    prefixes, suites, blockers = write_metadata(infile, outdir)
    write_index(outdir, prefixes, suites, blockers)


def get_options(argv):
    """Process command line arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--output-dir', required=True,
                        help='where to write output pages')
    parser.add_argument('--input', required=True,
                        help='JSON test data to read for input')
    return parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])
    main(OPTIONS.input, OPTIONS.output_dir)
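
The module above is normally driven through its CLI (see the runner script), but it can also be called directly. A minimal sketch under the assumption that gen_html.py is importable from the working directory; the call signatures match the deleted code, the input data is made up:

# Illustrative only: driving the deleted gen_html module without its CLI.
import gen_html

data = {
    'some test': {
        'kubernetes-e2e-gce': [{'build': 1, 'failed': False, 'time': 2.0}],
    },
}

# Prefix filtering; exact_match=True would require the job name to equal the prefix.
page, meta = gen_html.gen_html(data, 'kubernetes-e2e', exact_match=False)
print(meta.okay, meta.unstable, meta.failed, meta.skipped)  # TestMetadata counts
with open('tests-kubernetes-e2e.html', 'w') as buf:
    buf.write(page)
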
@@ -1,139 +0,0 @@
#!/usr/bin/env python

# Copyright 2016 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for gen_html."""

import json
import os
import shutil
import tempfile
import unittest

import gen_html

TEST_DATA = {
  'test1':
      {'kubernetes-release': [{'build': 3, 'failed': False, 'time': 3.52},
                              {'build': 4, 'failed': True, 'time': 63.21}],
       'kubernetes-debug': [{'build': 5, 'failed': False, 'time': 7.56},
                            {'build': 6, 'failed': False, 'time': 8.43}],
      },
  'test2':
      {'kubernetes-debug': [{'build': 6, 'failed': True, 'time': 3.53}]},
}

class GenHtmlTest(unittest.TestCase):
    """Unit tests for gen_html.py."""
    # pylint: disable=invalid-name

    def testHtmlHeader_NoScript(self):
        result = '\n'.join(gen_html.html_header('', False))
        self.assertNotIn('<script', result)

    def testHtmlHeader_NoTitle(self):
        def Test(title):
            result = '\n'.join(gen_html.html_header(title, False))
            self.assertNotIn('<title', result)
        Test('')
        Test(None)

    def testHtmlHeader_Title(self):
        lines = gen_html.html_header('foo', False)
        for item in lines:
            if '<title' in item:
                self.assertIn('foo', item)
                break
        else:
            self.fail('No foo in: %s' % '\n'.join(lines))

    def testHtmlHeader_Script(self):
        lines = gen_html.html_header('', True)
        for item in lines:
            if '<script' in item:
                break
        else:
            self.fail('No script in: %s' % '\n'.join(lines))

    @staticmethod
    def gen_html(*args):
        """Call gen_html with TEST_DATA."""
        return gen_html.gen_html(TEST_DATA, *args)[0]

    def testGenHtml(self):
        """Test that the expected tests and jobs are in the results."""
        html = self.gen_html('')
        self.assertIn('test1', html)
        self.assertIn('test2', html)
        self.assertIn('release', html)
        self.assertIn('debug', html)

    def testGenHtmlFilter(self):
        """Test that filtering to just the release jobs works."""
        html = self.gen_html('release')
        self.assertIn('release', html)
        self.assertIn('skipped">\ntest2', html)
        self.assertNotIn('debug', html)

    def testGenHtmlFilterExact(self):
        """Test that filtering to an exact name works."""
        html = self.gen_html('release', True)
        self.assertIn('release', html)
        self.assertNotIn('debug', html)

    def testGetOptions(self):
        """Test argument parsing works correctly."""

        def check(args, expected_output_dir, expected_input):
            """Check that args is parsed correctly."""
            options = gen_html.get_options(args)
            self.assertEquals(expected_output_dir, options.output_dir)
            self.assertEquals(expected_input, options.input)

        check(['--output-dir=foo', '--input=bar'], 'foo', 'bar')
        check(['--output-dir', 'foo', '--input', 'bar'], 'foo', 'bar')
        check(['--input=bar', '--output-dir=foo'], 'foo', 'bar')

    def testGetOptions_Missing(self):
        """Test missing arguments raise an exception."""
        def check(args):
            """Check that args raise an exception."""
            with self.assertRaises(SystemExit):
                gen_html.get_options(args)

        check([])
        check(['--output-dir=foo'])
        check(['--input=bar'])

    def testMain(self):
        """Test main() creates pages."""
        temp_dir = tempfile.mkdtemp(prefix='kube-test-hist-')
        try:
            tests_json = os.path.join(temp_dir, 'tests.json')
            with open(tests_json, 'w') as buf:
                json.dump(TEST_DATA, buf)
            gen_html.main(tests_json, temp_dir)
            for page in (
                    'index',
                    'tests-kubernetes',
                    'suite-kubernetes-release',
                    'suite-kubernetes-debug'):
                self.assertTrue(os.path.exists('%s/%s.html' % (temp_dir, page)))
        finally:
            shutil.rmtree(temp_dir)

if __name__ == '__main__':
    unittest.main()
@@ -1,220 +0,0 @@
#!/usr/bin/env python

# Copyright 2016 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Generates a JSON file containing test history for the last day.

Writes the JSON out to tests.json.
"""

from __future__ import print_function

import argparse
import json
import os
import re
import subprocess
import sys
import time
import urllib2
from xml.etree import ElementTree
import zlib


def get_json(url):
    """Does an HTTP GET to url and parses the JSON response. None on failure."""
    try:
        content = urllib2.urlopen(url).read().decode('utf-8')
        return json.loads(content)
    except urllib2.HTTPError:
        return None


def get_jobs(server):
    """Generates all job names running on the server."""
    jenkins_json = get_json('{}/api/json'.format(server))
    if not jenkins_json:
        return
    for job in jenkins_json['jobs']:
        yield job['name']

def get_builds(server, job):
    """Generates all build numbers for a given job."""
    job_json = get_json('{}/job/{}/api/json'.format(server, job))
    if not job_json:
        return
    for build in job_json['builds']:
        yield build['number']


def get_build_info(server, job, build):
    """Returns building status along with timestamp for a given build."""
    path = '{}/job/{}/{}/api/json'.format(server, job, str(build))
    build_json = get_json(path)
    if not build_json:
        return True, 0
    return build_json['building'], build_json['timestamp']


def gcs_ls(path):
    """Lists objects under a path on gcs."""
    try:
        result = subprocess.check_output(
            ['gsutil', 'ls', path],
            stderr=open(os.devnull, 'w'))
    except subprocess.CalledProcessError:
        result = b''
    for subpath in result.decode('utf-8').split():
        yield subpath

def gcs_ls_build(job, build):
    """Lists all files under a given job and build path."""
    url = 'gs://kubernetes-jenkins/logs/{}/{}'.format(job, str(build))
    for path in gcs_ls(url):
        yield path


def gcs_ls_artifacts(job, build):
    """Lists all artifacts for a build."""
    for path in gcs_ls_build(job, build):
        if path.endswith('artifacts/'):
            for artifact in gcs_ls(path):
                yield artifact


def gcs_ls_junit_paths(job, build):
    """Lists the paths of JUnit XML files for a build."""
    for path in gcs_ls_artifacts(job, build):
        if re.match(r'.*/junit.*\.xml$', path):
            yield path


def gcs_get_tests(path):
    """Generates test data out of the provided JUnit path.

    Returns None if there's an issue parsing the XML.
    Yields name, time, failed, skipped for each test.
    """
    try:
        data = subprocess.check_output(
            ['gsutil', 'cat', path], stderr=open(os.devnull, 'w'))
    except subprocess.CalledProcessError:
        return

    try:
        data = zlib.decompress(data, zlib.MAX_WBITS | 16)
    except zlib.error:
        # Don't fail if it's not gzipped.
        pass

    try:
        root = ElementTree.fromstring(data)
    except ElementTree.ParseError:
        return

    for child in root:
        name = child.attrib['name']
        ctime = float(child.attrib['time'])
        failed = False
        skipped = False
        for param in child:
            if param.tag == 'skipped':
                skipped = True
            elif param.tag == 'failure':
                failed = True
        yield name, ctime, failed, skipped


def get_tests_from_junit_path(path):
    """Generates all tests in a JUnit GCS path."""
    for test in gcs_get_tests(path):
        if not test:
            continue
        yield test


def get_tests_from_build(job, build):
    """Generates all tests for a build."""
    for junit_path in gcs_ls_junit_paths(job, build):
        for test in get_tests_from_junit_path(junit_path):
            yield test


def get_daily_builds(server, matcher):
    """Generates all (job, build) pairs for the last day."""
    now = time.time()
    for job in get_jobs(server):
        if not matcher(job):
            continue
        for build in reversed(sorted(get_builds(server, job))):
            building, timestamp = get_build_info(server, job, build)
            # Skip if it's still building.
            if building:
                continue
            # Quit once we've walked back over a day.
            if now - timestamp / 1000 > 60*60*24:
                break
            yield job, build


def get_tests(server, matcher):
    """Returns a dictionary of tests to be JSON encoded."""
    tests = {}
    for job, build in get_daily_builds(server, matcher):
        print('{}/{}'.format(job, str(build)))
        for name, duration, failed, skipped in get_tests_from_build(job, build):
            if name not in tests:
                tests[name] = {}
            if skipped:
                continue
            if job not in tests[name]:
                tests[name][job] = []
            tests[name][job].append({
                'build': build,
                'failed': failed,
                'time': duration
            })
    return tests


def main(server, match):
    """Collect test info in matching jobs."""
    print('Finding tests in jobs matching {} at server {}'.format(
        match, server))
    matcher = re.compile(match).match
    tests = get_tests(server, matcher)
    with open('tests.json', 'w') as buf:
        json.dump(tests, buf, sort_keys=True)


def get_options(argv):
    """Process command line arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--server',
        help='hostname of jenkins server',
        required=True,
    )
    parser.add_argument(
        '--match',
        help='filter to job names matching this re',
        required=True,
    )
    return parser.parse_args(argv)


if __name__ == '__main__':
    OPTIONS = get_options(sys.argv[1:])
    main(OPTIONS.server, OPTIONS.match)
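
gcs_get_tests above expects JUnit XML in which each testcase element carries name and time attributes and marks failures or skips with child elements. A minimal sketch of that shape and of the same parsing logic as the deleted code; the XML content itself is illustrative:

# Illustrative only: the JUnit shape gcs_get_tests (above) consumes.
from xml.etree import ElementTree

JUNIT = '''<testsuite>
  <testcase name="example passing test" time="1.5"/>
  <testcase name="example failing test" time="2.0"><failure>boom</failure></testcase>
  <testcase name="example skipped test" time="0"><skipped/></testcase>
</testsuite>'''

root = ElementTree.fromstring(JUNIT)
for child in root:
    failed = any(param.tag == 'failure' for param in child)
    skipped = any(param.tag == 'skipped' for param in child)
    # Mirrors the (name, time, failed, skipped) tuples yielded above.
    print(child.attrib['name'], float(child.attrib['time']), failed, skipped)
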
@@ -1,55 +0,0 @@
#!/usr/bin/env python

# Copyright 2016 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for gen_json."""

import unittest

import gen_json


class GenJsonTest(unittest.TestCase):
    """Unit tests for gen_json.py."""
    # pylint: disable=invalid-name

    def testGetOptions(self):
        """Test argument parsing works correctly."""
        def check(args, expected_server, expected_match):
            """Check that all args are parsed as expected."""
            options = gen_json.get_options(args)
            self.assertEquals(expected_server, options.server)
            self.assertEquals(expected_match, options.match)

        check(['--server=foo', '--match=bar'], 'foo', 'bar')
        check(['--server', 'foo', '--match', 'bar'], 'foo', 'bar')
        check(['--match=bar', '--server=foo'], 'foo', 'bar')

    def testGetOptions_Missing(self):
        """Test missing arguments raise an exception."""
        def check(args):
            """Check that missing args raise an exception."""
            with self.assertRaises(SystemExit):
                gen_json.get_options(args)

        check([])
        check(['--server=foo'])
        check(['--match=bar'])


if __name__ == '__main__':
    unittest.main()
@@ -1,23 +0,0 @@
function toggle(cls) {
	var els = document.getElementsByClassName(cls);
	var show = false;
	for (var i = 0; i < els.length; i++) {
		if (els[i].className == 'test ' + cls) {
			if (els[i].style.display == 'none') {
				els[i].style.display = 'block';
				show = true;
			} else {
				els[i].style.display = 'none';
				show = false;
			}
		}
	}
	// UGLY HACK
	document.getElementsByClassName('total ' + cls)[0].style.color = show ? '#000000' : '#888888';
}

function defaultToggles() {
	toggle('okay');
	toggle('skipped');
}
window.onload = defaultToggles;
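
toggle() above relies on the class names emitted by gen_html.py: a counter span with class "total <status>" that receives the onclick handler, and list items with class "test <status>" whose display it flips. A sketch of that markup contract, reconstructed from the format strings in the deleted gen_html.py; the counts and test names are illustrative:

# Illustrative only: the markup toggle('okay') operates on.
FRAGMENT = '''
<div id="header">All suites:
<span class="total okay" onclick="toggle('okay');">12</span>
<span class="total failed" onclick="toggle('failed');">3</span>
</div>
<ul class="test">
<li class="test okay">some passing test ...</li>
<li class="test failed">some failing test ...</li>
</ul>
'''
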
@@ -1,121 +0,0 @@
body {
    margin: 0;
    padding: 0;
    font-family: monospace;
}

#header {
    width: 100%;
    font-size: x-large;
    font-weight: bold;
    padding: 10px;
}

a.suite-link {
    font-weight: bold;
    font-size: large;
    display: block;
    text-decoration: none;
}

span.total {
    display: inline-block;
    width: 60px;
    padding-left: 30px;
    border-radius: 10px;
    margin: 5px;
    cursor: pointer;
    user-select: none;
    -webkit-user-select: none;
    -moz-user-select: none;
    -ms-user-select: none;
    user-select: none;
}

span.total.okay {
    background: linear-gradient(to left, #e3e3e3 80%, #009900 20%);
}

span.total.unstable {
    background: linear-gradient(to left, #e3e3e3 80%, #EE9500 20%);
}

span.total.failed {
    background: linear-gradient(to left, #e3e3e3 80%, #AA0000 20%);
}

span.total.skipped {
    background: linear-gradient(to left, #e3e3e3 80%, #999999 20%);
}

ul.test {
    list-style-type: none;
    padding: 0 0 5px 5px;
    width: 100%;
}

li.test {
    margin: 0 0 10px 0;
    border-top-left-radius: 10px;
    border-bottom-left-radius: 10px;
    padding: 5px;
    font-size: large;
    font-weight: bold;
    padding-left: 4%;
}

li.test.okay {
    background: linear-gradient(to left, #e3e3e3 97%, #009900 3%);
}

li.test.unstable {
    background: linear-gradient(to left, #e3e3e3 97%, #EE9500 3%);
}

li.test.failed {
    background: linear-gradient(to left, #e3e3e3 97%, #AA0000 3%);
}

li.test.skipped {
    background: linear-gradient(to left, #e3e3e3 97%, #999999 3%);
}

ul.suite {
    list-style: none;
    margin: 2px 0 0 0;
    padding-left: 20px;
}

li.suite {
    margin: 0 0 2px 0;
    font-size: normal;
    font-weight: normal;
}

li.suite span {
    float: left;
    width: 70px;
    font-weight: bold;
}

li.test span.time {
    width: 50px;
    font-weight: normal;
}

li.test span.okay {
    color: green;
}

li.test span.unstable {
    color: orange;
}

li.test span.failed {
    color: red;
}

li.test>span {
    display: inline-block;
    text-align: right;
}