Merge lp:~laurynas-biveinis/percona-server/bug1183625-5.5 into lp:percona-server/5.5

Proposed by Laurynas Biveinis
Status: Merged
Approved by: Stewart Smith
Approved revision: no longer in the source branch.
Merged at revision: 527
Proposed branch: lp:~laurynas-biveinis/percona-server/bug1183625-5.5
Merge into: lp:percona-server/5.5
Prerequisite: lp:~laurynas-biveinis/percona-server/tree-fixes-5.5
Diff against target: 19430 lines (+18482/-463) (has conflicts)
91 files modified
COPYING.show_temp_51 (+0/-13)
Percona-Server/mysql-test/include/have_response_time_distribution.inc (+4/-0)
Percona-Server/mysql-test/include/percona_query_cache_with_comments_end.inc (+3/-0)
Percona-Server/mysql-test/include/query_response_time.inc (+0/-43)
Percona-Server/mysql-test/r/percona_innodb_deadlock_count.result (+0/-28)
Percona-Server/mysql-test/r/percona_innodb_use_sys_stats_table.result (+0/-3)
Percona-Server/mysql-test/r/percona_log_connection_error.result (+16/-0)
Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_foo.result (+0/-7)
Percona-Server/mysql-test/r/percona_query_cache_with_comments_crash.result (+0/-21)
Percona-Server/mysql-test/r/percona_query_response_time.result (+1307/-0)
Percona-Server/mysql-test/r/percona_userstat.result (+80/-10)
Percona-Server/mysql-test/r/percona_xtradb_admin_command.result (+0/-6)
Percona-Server/mysql-test/r/percona_xtradb_bug317074.result (+5/-0)
Percona-Server/mysql-test/t/percona_bug643149.test (+49/-0)
Percona-Server/mysql-test/t/percona_bug933969.test (+0/-42)
Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl.test (+2/-0)
Percona-Server/mysql-test/t/percona_query_cache_with_comments_prepared_statements.test (+208/-0)
Percona-Server/mysql-test/t/percona_server_variables_debug.test (+0/-2)
Percona-Server/mysql-test/t/percona_server_variables_release.test (+2/-0)
Percona-Server/mysql-test/t/percona_sql_no_fcache.test (+11/-0)
Percona-Server/mysql-test/t/percona_userstat.test (+81/-6)
build/debian/copyright (+0/-86)
doc/source/compatibility.rst (+27/-0)
doc/source/diagnostics/innodb_stats.rst (+234/-0)
doc/source/flexibility/innodb_fast_shutdown.rst (+0/-36)
doc/source/management/innodb_expand_import.rst (+0/-160)
python-for-subunit2junitxml/BytesIO.py (+136/-0)
python-for-subunit2junitxml/iso8601/LICENSE (+20/-0)
python-for-subunit2junitxml/iso8601/README (+26/-0)
python-for-subunit2junitxml/iso8601/README.subunit (+5/-0)
python-for-subunit2junitxml/iso8601/setup.py (+58/-0)
python-for-subunit2junitxml/iso8601/test_iso8601.py (+111/-0)
python-for-subunit2junitxml/junitxml/__init__.py (+221/-0)
python-for-subunit2junitxml/junitxml/tests/__init__.py (+16/-0)
python-for-subunit2junitxml/junitxml/tests/test_junitxml.py (+327/-0)
python-for-subunit2junitxml/subunit/__init__.py (+1250/-0)
python-for-subunit2junitxml/subunit/chunked.py (+185/-0)
python-for-subunit2junitxml/subunit/details.py (+119/-0)
python-for-subunit2junitxml/subunit/iso8601.py (+133/-0)
python-for-subunit2junitxml/subunit/progress_model.py (+106/-0)
python-for-subunit2junitxml/subunit/run.py (+73/-0)
python-for-subunit2junitxml/subunit/test_results.py (+492/-0)
python-for-subunit2junitxml/subunit/tests/TestUtil.py (+80/-0)
python-for-subunit2junitxml/subunit/tests/__init__.py (+41/-0)
python-for-subunit2junitxml/subunit/tests/sample-script.py (+21/-0)
python-for-subunit2junitxml/subunit/tests/sample-two-script.py (+7/-0)
python-for-subunit2junitxml/subunit/tests/test_chunked.py (+152/-0)
python-for-subunit2junitxml/subunit/tests/test_details.py (+112/-0)
python-for-subunit2junitxml/subunit/tests/test_progress_model.py (+118/-0)
python-for-subunit2junitxml/subunit/tests/test_subunit_filter.py (+208/-0)
python-for-subunit2junitxml/subunit/tests/test_subunit_stats.py (+84/-0)
python-for-subunit2junitxml/subunit/tests/test_subunit_tags.py (+69/-0)
python-for-subunit2junitxml/subunit/tests/test_tap2subunit.py (+445/-0)
python-for-subunit2junitxml/subunit/tests/test_test_protocol.py (+1299/-0)
python-for-subunit2junitxml/subunit/tests/test_test_results.py (+300/-0)
python-for-subunit2junitxml/testtools/__init__.py (+80/-0)
python-for-subunit2junitxml/testtools/_spinner.py (+316/-0)
python-for-subunit2junitxml/testtools/compat.py (+286/-0)
python-for-subunit2junitxml/testtools/content.py (+238/-0)
python-for-subunit2junitxml/testtools/content_type.py (+33/-0)
python-for-subunit2junitxml/testtools/deferredruntest.py (+335/-0)
python-for-subunit2junitxml/testtools/distutilscmd.py (+62/-0)
python-for-subunit2junitxml/testtools/helpers.py (+64/-0)
python-for-subunit2junitxml/testtools/matchers.py (+785/-0)
python-for-subunit2junitxml/testtools/monkey.py (+97/-0)
python-for-subunit2junitxml/testtools/run.py (+332/-0)
python-for-subunit2junitxml/testtools/runtest.py (+200/-0)
python-for-subunit2junitxml/testtools/testcase.py (+724/-0)
python-for-subunit2junitxml/testtools/testresult/__init__.py (+19/-0)
python-for-subunit2junitxml/testtools/testresult/doubles.py (+111/-0)
python-for-subunit2junitxml/testtools/testresult/real.py (+621/-0)
python-for-subunit2junitxml/testtools/tests/__init__.py (+44/-0)
python-for-subunit2junitxml/testtools/tests/helpers.py (+72/-0)
python-for-subunit2junitxml/testtools/tests/test_compat.py (+257/-0)
python-for-subunit2junitxml/testtools/tests/test_content.py (+223/-0)
python-for-subunit2junitxml/testtools/tests/test_content_type.py (+46/-0)
python-for-subunit2junitxml/testtools/tests/test_deferredruntest.py (+738/-0)
python-for-subunit2junitxml/testtools/tests/test_distutilscmd.py (+90/-0)
python-for-subunit2junitxml/testtools/tests/test_fixturesupport.py (+79/-0)
python-for-subunit2junitxml/testtools/tests/test_helpers.py (+106/-0)
python-for-subunit2junitxml/testtools/tests/test_matchers.py (+695/-0)
python-for-subunit2junitxml/testtools/tests/test_monkey.py (+167/-0)
python-for-subunit2junitxml/testtools/tests/test_run.py (+77/-0)
python-for-subunit2junitxml/testtools/tests/test_runtest.py (+300/-0)
python-for-subunit2junitxml/testtools/tests/test_spinner.py (+332/-0)
python-for-subunit2junitxml/testtools/tests/test_testresult.py (+1372/-0)
python-for-subunit2junitxml/testtools/tests/test_testsuite.py (+53/-0)
python-for-subunit2junitxml/testtools/tests/test_testtools.py (+1143/-0)
python-for-subunit2junitxml/testtools/tests/test_with_with.py (+42/-0)
python-for-subunit2junitxml/testtools/testsuite.py (+87/-0)
python-for-subunit2junitxml/testtools/utils.py (+13/-0)
Conflict: can't delete UDF.moved because it is not empty.  Not deleting.
Conflict adding file UDF.  Moved existing file to UDF.moved.
Conflict because UDF.moved is not versioned, but has versioned children.  Versioned directory.
Conflict adding file doc/source/performance/innodb_lazy_drop_table.rst.  Moved existing file to doc/source/performance/innodb_lazy_drop_table.rst.moved.
To merge this branch: bzr merge lp:~laurynas-biveinis/percona-server/bug1183625-5.5
Reviewer Review Type Date Requested Status
Stewart Smith (community) Approve
Review via email: mp+165967@code.launchpad.net

Description of the change

Merge the percona_userstat testcase from 5.1, partially addressing bug 1179534, and containing the testcase for bug 1183625.

http://jenkins.percona.com/job/percona-server-5.5-param/746/

To post a comment you must log in.
Revision history for this message
Laurynas Biveinis (laurynas-biveinis) wrote :

The conflicts will be addressed at the merge time.

Revision history for this message
Stewart Smith (stewart) wrote :

I used "bzr merge" and "bzr diff" locally to review, as LP seems to have gotten it really wrong with the diff.

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== removed file 'COPYING.show_temp_51'
--- COPYING.show_temp_51 2013-05-28 06:50:44 +0000
+++ COPYING.show_temp_51 1970-01-01 00:00:00 +0000
@@ -1,13 +0,0 @@
1Portions of this software contain modifications contributed by Venu Anuganti.
2These contributions are used with the following license:
3
4Copyright (c) 2010, Venu Anuganti, http://venublog.com/
5All rights reserved.
6
7Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
8
9 * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
10 * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
11 * Neither the name of the <ORGANIZATION> nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
12
13THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
140
=== added file 'Percona-Server/mysql-test/include/have_response_time_distribution.inc'
--- Percona-Server/mysql-test/include/have_response_time_distribution.inc 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/include/have_response_time_distribution.inc 2013-05-28 06:50:46 +0000
@@ -0,0 +1,4 @@
1-- require r/have_response_time_distribution.require
2disable_query_log;
3show variables like 'have_response_time_distribution';
4enable_query_log;
05
=== added file 'Percona-Server/mysql-test/include/percona_query_cache_with_comments_end.inc'
--- Percona-Server/mysql-test/include/percona_query_cache_with_comments_end.inc 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/include/percona_query_cache_with_comments_end.inc 2013-05-28 06:50:46 +0000
@@ -0,0 +1,3 @@
1DROP TABLE t1;
2SET GLOBAL query_cache_size=default;
3set global query_cache_strip_comments=OFF;
04
=== removed file 'Percona-Server/mysql-test/include/query_response_time.inc'
--- Percona-Server/mysql-test/include/query_response_time.inc 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/include/query_response_time.inc 1970-01-01 00:00:00 +0000
@@ -1,43 +0,0 @@
1SET SESSION query_exec_time=0.1;
2
3SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
4EVAL SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=$base;
5FLUSH QUERY_RESPONSE_TIME;
6# Following two queries check works of FLUSH and
7# respecting of "QUERY_RESPONSE_TIME_STATS" variable (see launchpad bug #855312)
8SHOW QUERY_RESPONSE_TIME;
9SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
10SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
11
12SET SESSION query_exec_time=0.31; SELECT 1;
13SET SESSION query_exec_time=0.32; SELECT 1;
14SET SESSION query_exec_time=0.33; SELECT 1;
15SET SESSION query_exec_time=0.34; SELECT 1;
16SET SESSION query_exec_time=0.35; SELECT 1;
17SET SESSION query_exec_time=0.36; SELECT 1;
18SET SESSION query_exec_time=0.37; SELECT 1;
19SET SESSION query_exec_time=0.38; SELECT 1;
20SET SESSION query_exec_time=0.39; SELECT 1;
21SET SESSION query_exec_time=0.4; SELECT 1;
22SET SESSION query_exec_time=1.1; SELECT 1;
23SET SESSION query_exec_time=1.2; SELECT 1;
24SET SESSION query_exec_time=1.3; SELECT 1;
25SET SESSION query_exec_time=1.5; SELECT 1;
26SET SESSION query_exec_time=1.4; SELECT 1;
27SET SESSION query_exec_time=0.5; SELECT 1;
28SET SESSION query_exec_time=2.1; SELECT 1;
29SET SESSION query_exec_time=2.3; SELECT 1;
30SET SESSION query_exec_time=2.5; SELECT 1;
31SET SESSION query_exec_time=3.1; SELECT 1;
32SET SESSION query_exec_time=4.1; SELECT 1;
33SET SESSION query_exec_time=5.1; SELECT 1;
34
35SET SESSION query_exec_time=0.1;
36
37SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
38
39SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
40SHOW QUERY_RESPONSE_TIME;
41SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
42
43SET SESSION query_exec_time=default;
440
=== removed file 'Percona-Server/mysql-test/r/percona_innodb_deadlock_count.result'
--- Percona-Server/mysql-test/r/percona_innodb_deadlock_count.result 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/r/percona_innodb_deadlock_count.result 1970-01-01 00:00:00 +0000
@@ -1,28 +0,0 @@
1# Establish connection con1 (user=root)
2# Establish connection con2 (user=root)
3# Establish connection con3 (user=root)
4# Drop test table
5drop table if exists t;
6# Create test table
7create table t(a INT PRIMARY KEY, b INT) engine=InnoDB;
8# Insert two rows to test table
9insert into t values(2,1);
10insert into t values(1,2);
11# Switch to connection con1
12BEGIN;
13SELECT b FROM t WHERE a=1 FOR UPDATE;
14# Switch to connection con2
15BEGIN;
16SELECT b FROM t WHERE a=2 FOR UPDATE;
17# Switch to connection con1
18SELECT b FROM t WHERE a=2 FOR UPDATE;
19# Switch to connection con2
20SELECT b FROM t WHERE a=1 FOR UPDATE;
21# Switch to connection con1
22ROLLBACK;
23# Switch to connection con2
24ROLLBACK;
25# Switch to connection con3
26Deadlocks: 1
27# Drop test table
28drop table t;
290
=== removed file 'Percona-Server/mysql-test/r/percona_innodb_use_sys_stats_table.result'
--- Percona-Server/mysql-test/r/percona_innodb_use_sys_stats_table.result 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/r/percona_innodb_use_sys_stats_table.result 1970-01-01 00:00:00 +0000
@@ -1,3 +0,0 @@
1show variables like 'innodb_use_sys_stats%';
2Variable_name Value
3innodb_use_sys_stats_table ON
40
=== added file 'Percona-Server/mysql-test/r/percona_log_connection_error.result'
--- Percona-Server/mysql-test/r/percona_log_connection_error.result 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_log_connection_error.result 2013-05-28 06:50:46 +0000
@@ -0,0 +1,16 @@
1SET @old_max_connections = @@max_connections;
2SET @old_log_warnings = @@log_warnings;
3SET GLOBAL max_connections=2;
4SET GLOBAL LOG_WARNINGS = 0;
5connect(localhost,root,,test,port,socket);
6ERROR HY000: Too many connections
7SET GLOBAL LOG_WARNINGS = 1;
8connect(localhost,root,,test,port,socket);
9ERROR HY000: Too many connections
10SET GLOBAL LOG_WARNINGS = 0;
11connect(localhost,root,,test,port,socket);
12ERROR HY000: Too many connections
13SET GLOBAL max_connections = @old_max_connections;
14SET GLOBAL log_warnings = @old_log_warnings;
15[log_grep.inc] file: percona.log_connection_error.err pattern: Too many connections
16[log_grep.inc] lines: 1
017
=== removed file 'Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_foo.result'
--- Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_foo.result 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/r/percona_log_slow_admin_statements-config_foo.result 1970-01-01 00:00:00 +0000
@@ -1,7 +0,0 @@
1call mtr.add_suppression("option 'log_slow_admin_statements': boolean value 'foo' wasn't recognized. Set to OFF.");
2SHOW GLOBAL VARIABLES like 'log_slow_admin_statements';
3Variable_name Value
4log_slow_admin_statements OFF
5SELECT * FROM INFORMATION_SCHEMA.GLOBAL_VARIABLES WHERE VARIABLE_NAME='log_slow_admin_statements';
6VARIABLE_NAME VARIABLE_VALUE
7LOG_SLOW_ADMIN_STATEMENTS OFF
80
=== removed file 'Percona-Server/mysql-test/r/percona_query_cache_with_comments_crash.result'
--- Percona-Server/mysql-test/r/percona_query_cache_with_comments_crash.result 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/r/percona_query_cache_with_comments_crash.result 1970-01-01 00:00:00 +0000
@@ -1,21 +0,0 @@
1set GLOBAL query_cache_size=1355776;
2drop table if exists t1;
3create table t1 (a int not null);
4insert into t1 values (1),(2),(3);
5flush query cache;
6flush query cache;
7reset query cache;
8flush status;
9( select * from t1 );
10a
111
122
133
14/*!40101 SET @OLD_SQL_MODE := @@SQL_MODE, @@SQL_MODE := REPLACE(REPLACE(@@SQL_MODE, 'ANSI_QUOTES', ''), ',,', ','), @OLD_QUOTE := @@SQL_QUOTE_SHOW_CREATE, @@SQL_QUOTE_SHOW_CREATE := 1 */;
15/* only comment */;
16# only comment
17;
18-- only comment
19;
20DROP TABLE t1;
21SET GLOBAL query_cache_size= default;
220
=== added file 'Percona-Server/mysql-test/r/percona_query_response_time.result'
--- Percona-Server/mysql-test/r/percona_query_response_time.result 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_query_response_time.result 2013-05-28 06:50:46 +0000
@@ -0,0 +1,1307 @@
1SET SESSION query_exec_time=0.1;
2SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
3SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=1;
4Warnings:
5Warning 1292 Truncated incorrect query_response_time_range_base value: '1'
6FLUSH QUERY_RESPONSE_TIME;
7SHOW QUERY_RESPONSE_TIME;
8
9 0.000001 0 0.000000
10 0.000003 0 0.000000
11 0.000007 0 0.000000
12 0.000015 0 0.000000
13 0.000030 0 0.000000
14 0.000061 0 0.000000
15 0.000122 0 0.000000
16 0.000244 0 0.000000
17 0.000488 0 0.000000
18 0.000976 0 0.000000
19 0.001953 0 0.000000
20 0.003906 0 0.000000
21 0.007812 0 0.000000
22 0.015625 0 0.000000
23 0.031250 0 0.000000
24 0.062500 0 0.000000
25 0.125000 0 0.000000
26 0.250000 0 0.000000
27 0.500000 0 0.000000
28 1.000000 0 0.000000
29 2.000000 0 0.000000
30 4.000000 0 0.000000
31 8.000000 0 0.000000
32 16.000000 0 0.000000
33 32.000000 0 0.000000
34 64.000000 0 0.000000
35 128.000000 0 0.000000
36 256.000000 0 0.000000
37 512.000000 0 0.000000
38 1024.000000 0 0.000000
39 2048.000000 0 0.000000
40 4096.000000 0 0.000000
41 8192.000000 0 0.000000
42 16384.000000 0 0.000000
43 32768.000000 0 0.000000
44 65536.000000 0 0.000000
45 131072.000000 0 0.000000
46 262144.000000 0 0.000000
47 524288.000000 0 0.000000
481048576.000000 0 0.000000
492097152.000000 0 0.000000
504194304.000000 0 0.000000
518388608.000000 0 0.000000
52TOO LONG 0 TOO LONG
53SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
54time count total
55 0.000001 0 0.000000
56 0.000003 0 0.000000
57 0.000007 0 0.000000
58 0.000015 0 0.000000
59 0.000030 0 0.000000
60 0.000061 0 0.000000
61 0.000122 0 0.000000
62 0.000244 0 0.000000
63 0.000488 0 0.000000
64 0.000976 0 0.000000
65 0.001953 0 0.000000
66 0.003906 0 0.000000
67 0.007812 0 0.000000
68 0.015625 0 0.000000
69 0.031250 0 0.000000
70 0.062500 0 0.000000
71 0.125000 0 0.000000
72 0.250000 0 0.000000
73 0.500000 0 0.000000
74 1.000000 0 0.000000
75 2.000000 0 0.000000
76 4.000000 0 0.000000
77 8.000000 0 0.000000
78 16.000000 0 0.000000
79 32.000000 0 0.000000
80 64.000000 0 0.000000
81 128.000000 0 0.000000
82 256.000000 0 0.000000
83 512.000000 0 0.000000
84 1024.000000 0 0.000000
85 2048.000000 0 0.000000
86 4096.000000 0 0.000000
87 8192.000000 0 0.000000
88 16384.000000 0 0.000000
89 32768.000000 0 0.000000
90 65536.000000 0 0.000000
91 131072.000000 0 0.000000
92 262144.000000 0 0.000000
93 524288.000000 0 0.000000
941048576.000000 0 0.000000
952097152.000000 0 0.000000
964194304.000000 0 0.000000
978388608.000000 0 0.000000
98TOO LONG 0 TOO LONG
99SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
100SET SESSION query_exec_time=0.31;
101SELECT 1;
1021
1031
104SET SESSION query_exec_time=0.32;
105SELECT 1;
1061
1071
108SET SESSION query_exec_time=0.33;
109SELECT 1;
1101
1111
112SET SESSION query_exec_time=0.34;
113SELECT 1;
1141
1151
116SET SESSION query_exec_time=0.35;
117SELECT 1;
1181
1191
120SET SESSION query_exec_time=0.36;
121SELECT 1;
1221
1231
124SET SESSION query_exec_time=0.37;
125SELECT 1;
1261
1271
128SET SESSION query_exec_time=0.38;
129SELECT 1;
1301
1311
132SET SESSION query_exec_time=0.39;
133SELECT 1;
1341
1351
136SET SESSION query_exec_time=0.4;
137SELECT 1;
1381
1391
140SET SESSION query_exec_time=1.1;
141SELECT 1;
1421
1431
144SET SESSION query_exec_time=1.2;
145SELECT 1;
1461
1471
148SET SESSION query_exec_time=1.3;
149SELECT 1;
1501
1511
152SET SESSION query_exec_time=1.5;
153SELECT 1;
1541
1551
156SET SESSION query_exec_time=1.4;
157SELECT 1;
1581
1591
160SET SESSION query_exec_time=0.5;
161SELECT 1;
1621
1631
164SET SESSION query_exec_time=2.1;
165SELECT 1;
1661
1671
168SET SESSION query_exec_time=2.3;
169SELECT 1;
1701
1711
172SET SESSION query_exec_time=2.5;
173SELECT 1;
1741
1751
176SET SESSION query_exec_time=3.1;
177SELECT 1;
1781
1791
180SET SESSION query_exec_time=4.1;
181SELECT 1;
1821
1831
184SET SESSION query_exec_time=5.1;
185SELECT 1;
1861
1871
188SET SESSION query_exec_time=0.1;
189SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
190SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
191Variable_name Value
192query_response_time_range_base 2
193SHOW QUERY_RESPONSE_TIME;
194
195 0.000001 24 0.000000
196 0.000003 0 0.000000
197 0.000007 0 0.000000
198 0.000015 0 0.000000
199 0.000030 0 0.000000
200 0.000061 0 0.000000
201 0.000122 0 0.000000
202 0.000244 0 0.000000
203 0.000488 0 0.000000
204 0.000976 0 0.000000
205 0.001953 0 0.000000
206 0.003906 0 0.000000
207 0.007812 0 0.000000
208 0.015625 0 0.000000
209 0.031250 0 0.000000
210 0.062500 0 0.000000
211 0.125000 0 0.000000
212 0.250000 0 0.000000
213 0.500000 10 3.550000
214 1.000000 1 0.500000
215 2.000000 5 6.500000
216 4.000000 4 10.000000
217 8.000000 2 9.199999
218 16.000000 0 0.000000
219 32.000000 0 0.000000
220 64.000000 0 0.000000
221 128.000000 0 0.000000
222 256.000000 0 0.000000
223 512.000000 0 0.000000
224 1024.000000 0 0.000000
225 2048.000000 0 0.000000
226 4096.000000 0 0.000000
227 8192.000000 0 0.000000
228 16384.000000 0 0.000000
229 32768.000000 0 0.000000
230 65536.000000 0 0.000000
231 131072.000000 0 0.000000
232 262144.000000 0 0.000000
233 524288.000000 0 0.000000
2341048576.000000 0 0.000000
2352097152.000000 0 0.000000
2364194304.000000 0 0.000000
2378388608.000000 0 0.000000
238TOO LONG 0 TOO LONG
239SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
240time count total
241 0.000001 24 0.000000
242 0.000003 0 0.000000
243 0.000007 0 0.000000
244 0.000015 0 0.000000
245 0.000030 0 0.000000
246 0.000061 0 0.000000
247 0.000122 0 0.000000
248 0.000244 0 0.000000
249 0.000488 0 0.000000
250 0.000976 0 0.000000
251 0.001953 0 0.000000
252 0.003906 0 0.000000
253 0.007812 0 0.000000
254 0.015625 0 0.000000
255 0.031250 0 0.000000
256 0.062500 0 0.000000
257 0.125000 0 0.000000
258 0.250000 0 0.000000
259 0.500000 10 3.550000
260 1.000000 1 0.500000
261 2.000000 5 6.500000
262 4.000000 4 10.000000
263 8.000000 2 9.199999
264 16.000000 0 0.000000
265 32.000000 0 0.000000
266 64.000000 0 0.000000
267 128.000000 0 0.000000
268 256.000000 0 0.000000
269 512.000000 0 0.000000
270 1024.000000 0 0.000000
271 2048.000000 0 0.000000
272 4096.000000 0 0.000000
273 8192.000000 0 0.000000
274 16384.000000 0 0.000000
275 32768.000000 0 0.000000
276 65536.000000 0 0.000000
277 131072.000000 0 0.000000
278 262144.000000 0 0.000000
279 524288.000000 0 0.000000
2801048576.000000 0 0.000000
2812097152.000000 0 0.000000
2824194304.000000 0 0.000000
2838388608.000000 0 0.000000
284TOO LONG 0 TOO LONG
285SET SESSION query_exec_time=default;
286SET SESSION query_exec_time=0.1;
287SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
288SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=2;
289FLUSH QUERY_RESPONSE_TIME;
290SHOW QUERY_RESPONSE_TIME;
291
292 0.000001 0 0.000000
293 0.000003 0 0.000000
294 0.000007 0 0.000000
295 0.000015 0 0.000000
296 0.000030 0 0.000000
297 0.000061 0 0.000000
298 0.000122 0 0.000000
299 0.000244 0 0.000000
300 0.000488 0 0.000000
301 0.000976 0 0.000000
302 0.001953 0 0.000000
303 0.003906 0 0.000000
304 0.007812 0 0.000000
305 0.015625 0 0.000000
306 0.031250 0 0.000000
307 0.062500 0 0.000000
308 0.125000 0 0.000000
309 0.250000 0 0.000000
310 0.500000 0 0.000000
311 1.000000 0 0.000000
312 2.000000 0 0.000000
313 4.000000 0 0.000000
314 8.000000 0 0.000000
315 16.000000 0 0.000000
316 32.000000 0 0.000000
317 64.000000 0 0.000000
318 128.000000 0 0.000000
319 256.000000 0 0.000000
320 512.000000 0 0.000000
321 1024.000000 0 0.000000
322 2048.000000 0 0.000000
323 4096.000000 0 0.000000
324 8192.000000 0 0.000000
325 16384.000000 0 0.000000
326 32768.000000 0 0.000000
327 65536.000000 0 0.000000
328 131072.000000 0 0.000000
329 262144.000000 0 0.000000
330 524288.000000 0 0.000000
3311048576.000000 0 0.000000
3322097152.000000 0 0.000000
3334194304.000000 0 0.000000
3348388608.000000 0 0.000000
335TOO LONG 0 TOO LONG
336SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
337time count total
338 0.000001 0 0.000000
339 0.000003 0 0.000000
340 0.000007 0 0.000000
341 0.000015 0 0.000000
342 0.000030 0 0.000000
343 0.000061 0 0.000000
344 0.000122 0 0.000000
345 0.000244 0 0.000000
346 0.000488 0 0.000000
347 0.000976 0 0.000000
348 0.001953 0 0.000000
349 0.003906 0 0.000000
350 0.007812 0 0.000000
351 0.015625 0 0.000000
352 0.031250 0 0.000000
353 0.062500 0 0.000000
354 0.125000 0 0.000000
355 0.250000 0 0.000000
356 0.500000 0 0.000000
357 1.000000 0 0.000000
358 2.000000 0 0.000000
359 4.000000 0 0.000000
360 8.000000 0 0.000000
361 16.000000 0 0.000000
362 32.000000 0 0.000000
363 64.000000 0 0.000000
364 128.000000 0 0.000000
365 256.000000 0 0.000000
366 512.000000 0 0.000000
367 1024.000000 0 0.000000
368 2048.000000 0 0.000000
369 4096.000000 0 0.000000
370 8192.000000 0 0.000000
371 16384.000000 0 0.000000
372 32768.000000 0 0.000000
373 65536.000000 0 0.000000
374 131072.000000 0 0.000000
375 262144.000000 0 0.000000
376 524288.000000 0 0.000000
3771048576.000000 0 0.000000
3782097152.000000 0 0.000000
3794194304.000000 0 0.000000
3808388608.000000 0 0.000000
381TOO LONG 0 TOO LONG
382SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
383SET SESSION query_exec_time=0.31;
384SELECT 1;
3851
3861
387SET SESSION query_exec_time=0.32;
388SELECT 1;
3891
3901
391SET SESSION query_exec_time=0.33;
392SELECT 1;
3931
3941
395SET SESSION query_exec_time=0.34;
396SELECT 1;
3971
3981
399SET SESSION query_exec_time=0.35;
400SELECT 1;
4011
4021
403SET SESSION query_exec_time=0.36;
404SELECT 1;
4051
4061
407SET SESSION query_exec_time=0.37;
408SELECT 1;
4091
4101
411SET SESSION query_exec_time=0.38;
412SELECT 1;
4131
4141
415SET SESSION query_exec_time=0.39;
416SELECT 1;
4171
4181
419SET SESSION query_exec_time=0.4;
420SELECT 1;
4211
4221
423SET SESSION query_exec_time=1.1;
424SELECT 1;
4251
4261
427SET SESSION query_exec_time=1.2;
428SELECT 1;
4291
4301
431SET SESSION query_exec_time=1.3;
432SELECT 1;
4331
4341
435SET SESSION query_exec_time=1.5;
436SELECT 1;
4371
4381
439SET SESSION query_exec_time=1.4;
440SELECT 1;
4411
4421
443SET SESSION query_exec_time=0.5;
444SELECT 1;
4451
4461
447SET SESSION query_exec_time=2.1;
448SELECT 1;
4491
4501
451SET SESSION query_exec_time=2.3;
452SELECT 1;
4531
4541
455SET SESSION query_exec_time=2.5;
456SELECT 1;
4571
4581
459SET SESSION query_exec_time=3.1;
460SELECT 1;
4611
4621
463SET SESSION query_exec_time=4.1;
464SELECT 1;
4651
4661
467SET SESSION query_exec_time=5.1;
468SELECT 1;
4691
4701
471SET SESSION query_exec_time=0.1;
472SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
473SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
474Variable_name Value
475query_response_time_range_base 2
476SHOW QUERY_RESPONSE_TIME;
477
478 0.000001 24 0.000000
479 0.000003 0 0.000000
480 0.000007 0 0.000000
481 0.000015 0 0.000000
482 0.000030 0 0.000000
483 0.000061 0 0.000000
484 0.000122 0 0.000000
485 0.000244 0 0.000000
486 0.000488 0 0.000000
487 0.000976 0 0.000000
488 0.001953 0 0.000000
489 0.003906 0 0.000000
490 0.007812 0 0.000000
491 0.015625 0 0.000000
492 0.031250 0 0.000000
493 0.062500 0 0.000000
494 0.125000 0 0.000000
495 0.250000 0 0.000000
496 0.500000 10 3.550000
497 1.000000 1 0.500000
498 2.000000 5 6.500000
499 4.000000 4 10.000000
500 8.000000 2 9.199999
501 16.000000 0 0.000000
502 32.000000 0 0.000000
503 64.000000 0 0.000000
504 128.000000 0 0.000000
505 256.000000 0 0.000000
506 512.000000 0 0.000000
507 1024.000000 0 0.000000
508 2048.000000 0 0.000000
509 4096.000000 0 0.000000
510 8192.000000 0 0.000000
511 16384.000000 0 0.000000
512 32768.000000 0 0.000000
513 65536.000000 0 0.000000
514 131072.000000 0 0.000000
515 262144.000000 0 0.000000
516 524288.000000 0 0.000000
5171048576.000000 0 0.000000
5182097152.000000 0 0.000000
5194194304.000000 0 0.000000
5208388608.000000 0 0.000000
521TOO LONG 0 TOO LONG
522SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
523time count total
524 0.000001 24 0.000000
525 0.000003 0 0.000000
526 0.000007 0 0.000000
527 0.000015 0 0.000000
528 0.000030 0 0.000000
529 0.000061 0 0.000000
530 0.000122 0 0.000000
531 0.000244 0 0.000000
532 0.000488 0 0.000000
533 0.000976 0 0.000000
534 0.001953 0 0.000000
535 0.003906 0 0.000000
536 0.007812 0 0.000000
537 0.015625 0 0.000000
538 0.031250 0 0.000000
539 0.062500 0 0.000000
540 0.125000 0 0.000000
541 0.250000 0 0.000000
542 0.500000 10 3.550000
543 1.000000 1 0.500000
544 2.000000 5 6.500000
545 4.000000 4 10.000000
546 8.000000 2 9.199999
547 16.000000 0 0.000000
548 32.000000 0 0.000000
549 64.000000 0 0.000000
550 128.000000 0 0.000000
551 256.000000 0 0.000000
552 512.000000 0 0.000000
553 1024.000000 0 0.000000
554 2048.000000 0 0.000000
555 4096.000000 0 0.000000
556 8192.000000 0 0.000000
557 16384.000000 0 0.000000
558 32768.000000 0 0.000000
559 65536.000000 0 0.000000
560 131072.000000 0 0.000000
561 262144.000000 0 0.000000
562 524288.000000 0 0.000000
5631048576.000000 0 0.000000
5642097152.000000 0 0.000000
5654194304.000000 0 0.000000
5668388608.000000 0 0.000000
567TOO LONG 0 TOO LONG
568SET SESSION query_exec_time=default;
569SET SESSION query_exec_time=0.1;
570SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
571SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=10;
572FLUSH QUERY_RESPONSE_TIME;
573SHOW QUERY_RESPONSE_TIME;
574
575 0.000001 0 0.000000
576 0.000010 0 0.000000
577 0.000100 0 0.000000
578 0.001000 0 0.000000
579 0.010000 0 0.000000
580 0.100000 0 0.000000
581 1.000000 0 0.000000
582 10.000000 0 0.000000
583 100.000000 0 0.000000
584 1000.000000 0 0.000000
585 10000.000000 0 0.000000
586 100000.000000 0 0.000000
5871000000.000000 0 0.000000
588TOO LONG 0 TOO LONG
589SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
590time count total
591 0.000001 0 0.000000
592 0.000010 0 0.000000
593 0.000100 0 0.000000
594 0.001000 0 0.000000
595 0.010000 0 0.000000
596 0.100000 0 0.000000
597 1.000000 0 0.000000
598 10.000000 0 0.000000
599 100.000000 0 0.000000
600 1000.000000 0 0.000000
601 10000.000000 0 0.000000
602 100000.000000 0 0.000000
6031000000.000000 0 0.000000
604TOO LONG 0 TOO LONG
605SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
606SET SESSION query_exec_time=0.31;
607SELECT 1;
6081
6091
610SET SESSION query_exec_time=0.32;
611SELECT 1;
6121
6131
614SET SESSION query_exec_time=0.33;
615SELECT 1;
6161
6171
618SET SESSION query_exec_time=0.34;
619SELECT 1;
6201
6211
622SET SESSION query_exec_time=0.35;
623SELECT 1;
6241
6251
626SET SESSION query_exec_time=0.36;
627SELECT 1;
6281
6291
630SET SESSION query_exec_time=0.37;
631SELECT 1;
6321
6331
634SET SESSION query_exec_time=0.38;
635SELECT 1;
6361
6371
638SET SESSION query_exec_time=0.39;
639SELECT 1;
6401
6411
642SET SESSION query_exec_time=0.4;
643SELECT 1;
6441
6451
646SET SESSION query_exec_time=1.1;
647SELECT 1;
6481
6491
650SET SESSION query_exec_time=1.2;
651SELECT 1;
6521
6531
654SET SESSION query_exec_time=1.3;
655SELECT 1;
6561
6571
658SET SESSION query_exec_time=1.5;
659SELECT 1;
6601
6611
662SET SESSION query_exec_time=1.4;
663SELECT 1;
6641
6651
666SET SESSION query_exec_time=0.5;
667SELECT 1;
6681
6691
670SET SESSION query_exec_time=2.1;
671SELECT 1;
6721
6731
674SET SESSION query_exec_time=2.3;
675SELECT 1;
6761
6771
678SET SESSION query_exec_time=2.5;
679SELECT 1;
6801
6811
682SET SESSION query_exec_time=3.1;
683SELECT 1;
6841
6851
686SET SESSION query_exec_time=4.1;
687SELECT 1;
6881
6891
690SET SESSION query_exec_time=5.1;
691SELECT 1;
6921
6931
694SET SESSION query_exec_time=0.1;
695SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
696SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
697Variable_name Value
698query_response_time_range_base 10
699SHOW QUERY_RESPONSE_TIME;
700
701 0.000001 24 0.000000
702 0.000010 0 0.000000
703 0.000100 0 0.000000
704 0.001000 0 0.000000
705 0.010000 0 0.000000
706 0.100000 0 0.000000
707 1.000000 11 4.050000
708 10.000000 11 25.699999
709 100.000000 0 0.000000
710 1000.000000 0 0.000000
711 10000.000000 0 0.000000
712 100000.000000 0 0.000000
7131000000.000000 0 0.000000
714TOO LONG 0 TOO LONG
715SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
716time count total
717 0.000001 24 0.000000
718 0.000010 0 0.000000
719 0.000100 0 0.000000
720 0.001000 0 0.000000
721 0.010000 0 0.000000
722 0.100000 0 0.000000
723 1.000000 11 4.050000
724 10.000000 11 25.699999
725 100.000000 0 0.000000
726 1000.000000 0 0.000000
727 10000.000000 0 0.000000
728 100000.000000 0 0.000000
7291000000.000000 0 0.000000
730TOO LONG 0 TOO LONG
731SET SESSION query_exec_time=default;
732SET SESSION query_exec_time=0.1;
733SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
734SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=7;
735FLUSH QUERY_RESPONSE_TIME;
736SHOW QUERY_RESPONSE_TIME;
737
738 0.000001 0 0.000000
739 0.000008 0 0.000000
740 0.000059 0 0.000000
741 0.000416 0 0.000000
742 0.002915 0 0.000000
743 0.020408 0 0.000000
744 0.142857 0 0.000000
745 1.000000 0 0.000000
746 7.000000 0 0.000000
747 49.000000 0 0.000000
748 343.000000 0 0.000000
749 2401.000000 0 0.000000
750 16807.000000 0 0.000000
751 117649.000000 0 0.000000
752 823543.000000 0 0.000000
7535764801.000000 0 0.000000
754TOO LONG 0 TOO LONG
755SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
756time count total
757 0.000001 0 0.000000
758 0.000008 0 0.000000
759 0.000059 0 0.000000
760 0.000416 0 0.000000
761 0.002915 0 0.000000
762 0.020408 0 0.000000
763 0.142857 0 0.000000
764 1.000000 0 0.000000
765 7.000000 0 0.000000
766 49.000000 0 0.000000
767 343.000000 0 0.000000
768 2401.000000 0 0.000000
769 16807.000000 0 0.000000
770 117649.000000 0 0.000000
771 823543.000000 0 0.000000
7725764801.000000 0 0.000000
773TOO LONG 0 TOO LONG
774SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
775SET SESSION query_exec_time=0.31;
776SELECT 1;
7771
7781
779SET SESSION query_exec_time=0.32;
780SELECT 1;
7811
7821
783SET SESSION query_exec_time=0.33;
784SELECT 1;
7851
7861
787SET SESSION query_exec_time=0.34;
788SELECT 1;
7891
7901
791SET SESSION query_exec_time=0.35;
792SELECT 1;
7931
7941
795SET SESSION query_exec_time=0.36;
796SELECT 1;
7971
7981
799SET SESSION query_exec_time=0.37;
800SELECT 1;
8011
8021
803SET SESSION query_exec_time=0.38;
804SELECT 1;
8051
8061
807SET SESSION query_exec_time=0.39;
808SELECT 1;
8091
8101
811SET SESSION query_exec_time=0.4;
812SELECT 1;
8131
8141
815SET SESSION query_exec_time=1.1;
816SELECT 1;
8171
8181
819SET SESSION query_exec_time=1.2;
820SELECT 1;
8211
8221
823SET SESSION query_exec_time=1.3;
824SELECT 1;
8251
8261
827SET SESSION query_exec_time=1.5;
828SELECT 1;
8291
8301
831SET SESSION query_exec_time=1.4;
832SELECT 1;
8331
8341
835SET SESSION query_exec_time=0.5;
836SELECT 1;
8371
8381
839SET SESSION query_exec_time=2.1;
840SELECT 1;
8411
8421
843SET SESSION query_exec_time=2.3;
844SELECT 1;
8451
8461
847SET SESSION query_exec_time=2.5;
848SELECT 1;
8491
8501
851SET SESSION query_exec_time=3.1;
852SELECT 1;
8531
8541
855SET SESSION query_exec_time=4.1;
856SELECT 1;
8571
8581
859SET SESSION query_exec_time=5.1;
860SELECT 1;
8611
8621
863SET SESSION query_exec_time=0.1;
864SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
865SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
866Variable_name Value
867query_response_time_range_base 7
868SHOW QUERY_RESPONSE_TIME;
869
870 0.000001 24 0.000000
871 0.000008 0 0.000000
872 0.000059 0 0.000000
873 0.000416 0 0.000000
874 0.002915 0 0.000000
875 0.020408 0 0.000000
876 0.142857 0 0.000000
877 1.000000 11 4.050000
878 7.000000 11 25.699999
879 49.000000 0 0.000000
880 343.000000 0 0.000000
881 2401.000000 0 0.000000
882 16807.000000 0 0.000000
883 117649.000000 0 0.000000
884 823543.000000 0 0.000000
8855764801.000000 0 0.000000
886TOO LONG 0 TOO LONG
887SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
888time count total
889 0.000001 24 0.000000
890 0.000008 0 0.000000
891 0.000059 0 0.000000
892 0.000416 0 0.000000
893 0.002915 0 0.000000
894 0.020408 0 0.000000
895 0.142857 0 0.000000
896 1.000000 11 4.050000
897 7.000000 11 25.699999
898 49.000000 0 0.000000
899 343.000000 0 0.000000
900 2401.000000 0 0.000000
901 16807.000000 0 0.000000
902 117649.000000 0 0.000000
903 823543.000000 0 0.000000
9045764801.000000 0 0.000000
905TOO LONG 0 TOO LONG
906SET SESSION query_exec_time=default;
907SET SESSION query_exec_time=0.1;
908SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
909SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=156;
910FLUSH QUERY_RESPONSE_TIME;
911SHOW QUERY_RESPONSE_TIME;
912
913 0.000041 0 0.000000
914 0.006410 0 0.000000
915 1.000000 0 0.000000
916 156.000000 0 0.000000
917 24336.000000 0 0.000000
9183796416.000000 0 0.000000
919TOO LONG 0 TOO LONG
920SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
921time count total
922 0.000041 0 0.000000
923 0.006410 0 0.000000
924 1.000000 0 0.000000
925 156.000000 0 0.000000
926 24336.000000 0 0.000000
9273796416.000000 0 0.000000
928TOO LONG 0 TOO LONG
929SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
930SET SESSION query_exec_time=0.31;
931SELECT 1;
9321
9331
934SET SESSION query_exec_time=0.32;
935SELECT 1;
9361
9371
938SET SESSION query_exec_time=0.33;
939SELECT 1;
9401
9411
942SET SESSION query_exec_time=0.34;
943SELECT 1;
9441
9451
946SET SESSION query_exec_time=0.35;
947SELECT 1;
9481
9491
950SET SESSION query_exec_time=0.36;
951SELECT 1;
9521
9531
954SET SESSION query_exec_time=0.37;
955SELECT 1;
9561
9571
958SET SESSION query_exec_time=0.38;
959SELECT 1;
9601
9611
962SET SESSION query_exec_time=0.39;
963SELECT 1;
9641
9651
966SET SESSION query_exec_time=0.4;
967SELECT 1;
9681
9691
970SET SESSION query_exec_time=1.1;
971SELECT 1;
9721
9731
974SET SESSION query_exec_time=1.2;
975SELECT 1;
9761
9771
978SET SESSION query_exec_time=1.3;
979SELECT 1;
9801
9811
982SET SESSION query_exec_time=1.5;
983SELECT 1;
9841
9851
986SET SESSION query_exec_time=1.4;
987SELECT 1;
9881
9891
990SET SESSION query_exec_time=0.5;
991SELECT 1;
9921
9931
994SET SESSION query_exec_time=2.1;
995SELECT 1;
9961
9971
998SET SESSION query_exec_time=2.3;
999SELECT 1;
10001
10011
1002SET SESSION query_exec_time=2.5;
1003SELECT 1;
10041
10051
1006SET SESSION query_exec_time=3.1;
1007SELECT 1;
10081
10091
1010SET SESSION query_exec_time=4.1;
1011SELECT 1;
10121
10131
1014SET SESSION query_exec_time=5.1;
1015SELECT 1;
10161
10171
1018SET SESSION query_exec_time=0.1;
1019SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
1020SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
1021Variable_name Value
1022query_response_time_range_base 156
1023SHOW QUERY_RESPONSE_TIME;
1024
1025 0.000041 24 0.000000
1026 0.006410 0 0.000000
1027 1.000000 11 4.050000
1028 156.000000 11 25.699999
1029 24336.000000 0 0.000000
10303796416.000000 0 0.000000
1031TOO LONG 0 TOO LONG
1032SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
1033time count total
1034 0.000041 24 0.000000
1035 0.006410 0 0.000000
1036 1.000000 11 4.050000
1037 156.000000 11 25.699999
1038 24336.000000 0 0.000000
10393796416.000000 0 0.000000
1040TOO LONG 0 TOO LONG
1041SET SESSION query_exec_time=default;
1042SET SESSION query_exec_time=0.1;
1043SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
1044SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=1000;
1045FLUSH QUERY_RESPONSE_TIME;
1046SHOW QUERY_RESPONSE_TIME;
1047
1048 0.000001 0 0.000000
1049 0.001000 0 0.000000
1050 1.000000 0 0.000000
1051 1000.000000 0 0.000000
10521000000.000000 0 0.000000
1053TOO LONG 0 TOO LONG
1054SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
1055time count total
1056 0.000001 0 0.000000
1057 0.001000 0 0.000000
1058 1.000000 0 0.000000
1059 1000.000000 0 0.000000
10601000000.000000 0 0.000000
1061TOO LONG 0 TOO LONG
1062SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
1063SET SESSION query_exec_time=0.31;
1064SELECT 1;
10651
10661
1067SET SESSION query_exec_time=0.32;
1068SELECT 1;
10691
10701
1071SET SESSION query_exec_time=0.33;
1072SELECT 1;
10731
10741
1075SET SESSION query_exec_time=0.34;
1076SELECT 1;
10771
10781
1079SET SESSION query_exec_time=0.35;
1080SELECT 1;
10811
10821
1083SET SESSION query_exec_time=0.36;
1084SELECT 1;
10851
10861
1087SET SESSION query_exec_time=0.37;
1088SELECT 1;
10891
10901
1091SET SESSION query_exec_time=0.38;
1092SELECT 1;
10931
10941
1095SET SESSION query_exec_time=0.39;
1096SELECT 1;
10971
10981
1099SET SESSION query_exec_time=0.4;
1100SELECT 1;
11011
11021
1103SET SESSION query_exec_time=1.1;
1104SELECT 1;
11051
11061
1107SET SESSION query_exec_time=1.2;
1108SELECT 1;
11091
11101
1111SET SESSION query_exec_time=1.3;
1112SELECT 1;
11131
11141
1115SET SESSION query_exec_time=1.5;
1116SELECT 1;
11171
11181
1119SET SESSION query_exec_time=1.4;
1120SELECT 1;
11211
11221
1123SET SESSION query_exec_time=0.5;
1124SELECT 1;
11251
11261
1127SET SESSION query_exec_time=2.1;
1128SELECT 1;
11291
11301
1131SET SESSION query_exec_time=2.3;
1132SELECT 1;
11331
11341
1135SET SESSION query_exec_time=2.5;
1136SELECT 1;
11371
11381
1139SET SESSION query_exec_time=3.1;
1140SELECT 1;
11411
11421
1143SET SESSION query_exec_time=4.1;
1144SELECT 1;
11451
11461
1147SET SESSION query_exec_time=5.1;
1148SELECT 1;
11491
11501
1151SET SESSION query_exec_time=0.1;
1152SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
1153SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
1154Variable_name Value
1155query_response_time_range_base 1000
1156SHOW QUERY_RESPONSE_TIME;
1157
1158 0.000001 24 0.000000
1159 0.001000 0 0.000000
1160 1.000000 11 4.050000
1161 1000.000000 11 25.699999
11621000000.000000 0 0.000000
1163TOO LONG 0 TOO LONG
1164SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
1165time count total
1166 0.000001 24 0.000000
1167 0.001000 0 0.000000
1168 1.000000 11 4.050000
1169 1000.000000 11 25.699999
11701000000.000000 0 0.000000
1171TOO LONG 0 TOO LONG
1172SET SESSION query_exec_time=default;
1173SET SESSION query_exec_time=0.1;
1174SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
1175SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=1001;
1176Warnings:
1177Warning 1292 Truncated incorrect query_response_time_range_base value: '1001'
1178FLUSH QUERY_RESPONSE_TIME;
1179SHOW QUERY_RESPONSE_TIME;
1180
1181 0.000001 0 0.000000
1182 0.001000 0 0.000000
1183 1.000000 0 0.000000
1184 1000.000000 0 0.000000
11851000000.000000 0 0.000000
1186TOO LONG 0 TOO LONG
1187SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
1188time count total
1189 0.000001 0 0.000000
1190 0.001000 0 0.000000
1191 1.000000 0 0.000000
1192 1000.000000 0 0.000000
11931000000.000000 0 0.000000
1194TOO LONG 0 TOO LONG
1195SET GLOBAL QUERY_RESPONSE_TIME_STATS=1;
1196SET SESSION query_exec_time=0.31;
1197SELECT 1;
11981
11991
1200SET SESSION query_exec_time=0.32;
1201SELECT 1;
12021
12031
1204SET SESSION query_exec_time=0.33;
1205SELECT 1;
12061
12071
1208SET SESSION query_exec_time=0.34;
1209SELECT 1;
12101
12111
1212SET SESSION query_exec_time=0.35;
1213SELECT 1;
12141
12151
1216SET SESSION query_exec_time=0.36;
1217SELECT 1;
12181
12191
1220SET SESSION query_exec_time=0.37;
1221SELECT 1;
12221
12231
1224SET SESSION query_exec_time=0.38;
1225SELECT 1;
12261
12271
1228SET SESSION query_exec_time=0.39;
1229SELECT 1;
12301
12311
1232SET SESSION query_exec_time=0.4;
1233SELECT 1;
12341
12351
1236SET SESSION query_exec_time=1.1;
1237SELECT 1;
12381
12391
1240SET SESSION query_exec_time=1.2;
1241SELECT 1;
12421
12431
1244SET SESSION query_exec_time=1.3;
1245SELECT 1;
12461
12471
1248SET SESSION query_exec_time=1.5;
1249SELECT 1;
12501
12511
1252SET SESSION query_exec_time=1.4;
1253SELECT 1;
12541
12551
1256SET SESSION query_exec_time=0.5;
1257SELECT 1;
12581
12591
1260SET SESSION query_exec_time=2.1;
1261SELECT 1;
12621
12631
1264SET SESSION query_exec_time=2.3;
1265SELECT 1;
12661
12671
1268SET SESSION query_exec_time=2.5;
1269SELECT 1;
12701
12711
1272SET SESSION query_exec_time=3.1;
1273SELECT 1;
12741
12751
1276SET SESSION query_exec_time=4.1;
1277SELECT 1;
12781
12791
1280SET SESSION query_exec_time=5.1;
1281SELECT 1;
12821
12831
1284SET SESSION query_exec_time=0.1;
1285SET GLOBAL QUERY_RESPONSE_TIME_STATS=0;
1286SHOW GLOBAL VARIABLES where Variable_name like 'QUERY_RESPONSE_TIME_RANGE_BASE';
1287Variable_name Value
1288query_response_time_range_base 1000
1289SHOW QUERY_RESPONSE_TIME;
1290
1291 0.000001 24 0.000000
1292 0.001000 0 0.000000
1293 1.000000 11 4.050000
1294 1000.000000 11 25.699999
12951000000.000000 0 0.000000
1296TOO LONG 0 TOO LONG
1297SELECT * FROM INFORMATION_SCHEMA.QUERY_RESPONSE_TIME;
1298time count total
1299 0.000001 24 0.000000
1300 0.001000 0 0.000000
1301 1.000000 11 4.050000
1302 1000.000000 11 25.699999
13031000000.000000 0 0.000000
1304TOO LONG 0 TOO LONG
1305SET SESSION query_exec_time=default;
1306SET GLOBAL QUERY_RESPONSE_TIME_RANGE_BASE=default;
1307SET GLOBAL QUERY_RESPONSE_TIME_STATS=default;
01308
=== renamed file 'Percona-Server/mysql-test/r/userstat_bug602047.result' => 'Percona-Server/mysql-test/r/percona_userstat.result'
--- Percona-Server/mysql-test/r/userstat_bug602047.result 2012-12-19 09:23:15 +0000
+++ Percona-Server/mysql-test/r/percona_userstat.result 2013-05-28 06:50:46 +0000
@@ -1,19 +1,89 @@
1DROP TABLE IF EXISTS t1;1DROP TABLE IF EXISTS t1;
2SET GLOBAL userstat=OFF;
3FLUSH CLIENT_STATISTICS;
4FLUSH INDEX_STATISTICS;
5FLUSH TABLE_STATISTICS;
6FLUSH THREAD_STATISTICS;
2FLUSH USER_STATISTICS;7FLUSH USER_STATISTICS;
3FLUSH TABLE_STATISTICS;8SELECT * FROM INFORMATION_SCHEMA.CLIENT_STATISTICS;
4FLUSH INDEX_STATISTICS;9CLIENT TOTAL_CONNECTIONS CONCURRENT_CONNECTIONS CONNECTED_TIME BUSY_TIME CPU_TIME BYTES_RECEIVED BYTES_SENT BINLOG_BYTES_WRITTEN ROWS_FETCHED ROWS_UPDATED TABLE_ROWS_READ SELECT_COMMANDS UPDATE_COMMANDS OTHER_COMMANDS COMMIT_TRANSACTIONS ROLLBACK_TRANSACTIONS DENIED_CONNECTIONS LOST_CONNECTIONS ACCESS_DENIED EMPTY_QUERIES TOTAL_SSL_CONNECTIONS
10SELECT * FROM INFORMATION_SCHEMA.INDEX_STATISTICS;
11TABLE_SCHEMA TABLE_NAME INDEX_NAME ROWS_READ
12SELECT * FROM INFORMATION_SCHEMA.TABLE_STATISTICS;
13TABLE_SCHEMA TABLE_NAME ROWS_READ ROWS_CHANGED ROWS_CHANGED_X_INDEXES
14SELECT * FROM INFORMATION_SCHEMA.THREAD_STATISTICS;
15THREAD_ID TOTAL_CONNECTIONS CONCURRENT_CONNECTIONS CONNECTED_TIME BUSY_TIME CPU_TIME BYTES_RECEIVED BYTES_SENT BINLOG_BYTES_WRITTEN ROWS_FETCHED ROWS_UPDATED TABLE_ROWS_READ SELECT_COMMANDS UPDATE_COMMANDS OTHER_COMMANDS COMMIT_TRANSACTIONS ROLLBACK_TRANSACTIONS DENIED_CONNECTIONS LOST_CONNECTIONS ACCESS_DENIED EMPTY_QUERIES TOTAL_SSL_CONNECTIONS
16SELECT * FROM INFORMATION_SCHEMA.USER_STATISTICS;
17USER TOTAL_CONNECTIONS CONCURRENT_CONNECTIONS CONNECTED_TIME BUSY_TIME CPU_TIME BYTES_RECEIVED BYTES_SENT BINLOG_BYTES_WRITTEN ROWS_FETCHED ROWS_UPDATED TABLE_ROWS_READ SELECT_COMMANDS UPDATE_COMMANDS OTHER_COMMANDS COMMIT_TRANSACTIONS ROLLBACK_TRANSACTIONS DENIED_CONNECTIONS LOST_CONNECTIONS ACCESS_DENIED EMPTY_QUERIES TOTAL_SSL_CONNECTIONS
18SHOW CLIENT_STATISTICS;
19Client Total_connections Concurrent_connections Connected_time Busy_time Cpu_time Bytes_received Bytes_sent Binlog_bytes_written Rows_fetched Rows_updated Table_rows_read Select_commands Update_commands Other_commands Commit_transactions Rollback_transactions Denied_connections Lost_connections Access_denied Empty_queries Total_ssl_connections
20SHOW INDEX_STATISTICS;
21Table_schema Table_name Index_name Rows_read
22SHOW TABLE_STATISTICS;
23Table_schema Table_name Rows_read Rows_changed Rows_changed_x_#indexes
24SHOW THREAD_STATISTICS;
25Thread_id Total_connections Concurrent_connections Connected_time Busy_time Cpu_time Bytes_received Bytes_sent Binlog_bytes_written Rows_fetched Rows_updated Table_rows_read Select_commands Update_commands Other_commands Commit_transactions Rollback_transactions Denied_connections Lost_connections Access_denied Empty_queries Total_ssl_connections
26SHOW USER_STATISTICS;
27User Total_connections Concurrent_connections Connected_time Busy_time Cpu_time Bytes_received Bytes_sent Binlog_bytes_written Rows_fetched Rows_updated Table_rows_read Select_commands Update_commands Other_commands Commit_transactions Rollback_transactions Denied_connections Lost_connections Access_denied Empty_queries Total_ssl_connections
5SET @userstat_old= @@userstat;28SET @userstat_old= @@userstat;
6SET GLOBAL userstat=ON;29SET GLOBAL userstat=ON;
7CREATE TABLE t1 ( id int(10), PRIMARY KEY (id)) ENGINE=InnoDB;30SELECT * FROM INFORMATION_SCHEMA.CLIENT_STATISTICS;
31CLIENT TOTAL_CONNECTIONS CONCURRENT_CONNECTIONS CONNECTED_TIME BUSY_TIME CPU_TIME BYTES_RECEIVED BYTES_SENT BINLOG_BYTES_WRITTEN ROWS_FETCHED ROWS_UPDATED TABLE_ROWS_READ SELECT_COMMANDS UPDATE_COMMANDS OTHER_COMMANDS COMMIT_TRANSACTIONS ROLLBACK_TRANSACTIONS DENIED_CONNECTIONS LOST_CONNECTIONS ACCESS_DENIED EMPTY_QUERIES TOTAL_SSL_CONNECTIONS
32localhost 1 CONNECTED_TIME BUSY_TIME CPU_TIME 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
33SELECT * FROM INFORMATION_SCHEMA.INDEX_STATISTICS;
34TABLE_SCHEMA TABLE_NAME INDEX_NAME ROWS_READ
35SELECT * FROM INFORMATION_SCHEMA.TABLE_STATISTICS;
36TABLE_SCHEMA TABLE_NAME ROWS_READ ROWS_CHANGED ROWS_CHANGED_X_INDEXES
37SELECT * FROM INFORMATION_SCHEMA.THREAD_STATISTICS;
38THREAD_ID TOTAL_CONNECTIONS CONCURRENT_CONNECTIONS CONNECTED_TIME BUSY_TIME CPU_TIME BYTES_RECEIVED BYTES_SENT BINLOG_BYTES_WRITTEN ROWS_FETCHED ROWS_UPDATED TABLE_ROWS_READ SELECT_COMMANDS UPDATE_COMMANDS OTHER_COMMANDS COMMIT_TRANSACTIONS ROLLBACK_TRANSACTIONS DENIED_CONNECTIONS LOST_CONNECTIONS ACCESS_DENIED EMPTY_QUERIES TOTAL_SSL_CONNECTIONS
39SELECT * FROM INFORMATION_SCHEMA.USER_STATISTICS;
40USER TOTAL_CONNECTIONS CONCURRENT_CONNECTIONS CONNECTED_TIME BUSY_TIME CPU_TIME BYTES_RECEIVED BYTES_SENT BINLOG_BYTES_WRITTEN ROWS_FETCHED ROWS_UPDATED TABLE_ROWS_READ SELECT_COMMANDS UPDATE_COMMANDS OTHER_COMMANDS COMMIT_TRANSACTIONS ROLLBACK_TRANSACTIONS DENIED_CONNECTIONS LOST_CONNECTIONS ACCESS_DENIED EMPTY_QUERIES TOTAL_SSL_CONNECTIONS
41root 1 CONNECTED_TIME BUSY_TIME CPU_TIME 0 218 0 0 1 0 0 4 0 0 0 0 0 0 0 3 0
42SHOW CLIENT_STATISTICS;
43Client Total_connections Concurrent_connections Connected_time Busy_time Cpu_time Bytes_received Bytes_sent Binlog_bytes_written Rows_fetched Rows_updated Table_rows_read Select_commands Update_commands Other_commands Commit_transactions Rollback_transactions Denied_connections Lost_connections Access_denied Empty_queries Total_ssl_connections
44localhost 1 CONNECTED_TIME BUSY_TIME CPU_TIME 0 271 0 0 2 0 0 5 0 0 0 0 0 0 0 3 0
45SHOW INDEX_STATISTICS;
46Table_schema Table_name Index_name Rows_read
47SHOW TABLE_STATISTICS;
48Table_schema Table_name Rows_read Rows_changed Rows_changed_x_#indexes
49SHOW THREAD_STATISTICS;
50Thread_id Total_connections Concurrent_connections Connected_time Busy_time Cpu_time Bytes_received Bytes_sent Binlog_bytes_written Rows_fetched Rows_updated Table_rows_read Select_commands Update_commands Other_commands Commit_transactions Rollback_transactions Denied_connections Lost_connections Access_denied Empty_queries Total_ssl_connections
51SHOW USER_STATISTICS;
52User Total_connections Concurrent_connections Connected_time Busy_time Cpu_time Bytes_received Bytes_sent Binlog_bytes_written Rows_fetched Rows_updated Table_rows_read Select_commands Update_commands Other_commands Commit_transactions Rollback_transactions Denied_connections Lost_connections Access_denied Empty_queries Total_ssl_connections
53root 1 CONNECTED_TIME BUSY_TIME CPU_TIME 0 377 0 0 3 0 0 9 0 0 0 0 0 0 0 6 0
54CREATE TABLE t1 (id int(10), PRIMARY KEY (id)) ENGINE=InnoDB;
8INSERT INTO t1 VALUES (1),(2),(3),(4),(5),(6),(7),(8),(9),(10);55INSERT INTO t1 VALUES (1),(2),(3),(4),(5),(6),(7),(8),(9),(10);
9SELECT COUNT(*) FROM t1;56SELECT COUNT(*) FROM t1;
10COUNT(*)57COUNT(*)
11105810
12SELECT ROWS_READ FROM information_schema.table_statistics WHERE TABLE_NAME='t1';59SELECT ROWS_READ FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t1';
13ROWS_READ60ROWS_READ
14106110
15SELECT ROWS_READ FROM information_schema.index_statistics WHERE TABLE_NAME='t1';62SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
16ROWS_READ63ROWS_READ
17106410
65FLUSH TABLE_STATISTICS;
66SELECT ROWS_READ FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t1';
67ROWS_READ
68SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
69ROWS_READ
7010
71FLUSH INDEX_STATISTICS;
72SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
73ROWS_READ
74SELECT COUNT(*) FROM t1;
75COUNT(*)
7610
77SELECT ROWS_READ FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t1';
78ROWS_READ
7910
80SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
81ROWS_READ
8210
83DROP TABLE t1;
84CREATE TABLE t2 (c1 INT UNSIGNED) ENGINE=InnoDB;
85ALTER TABLE t2 MODIFY c1 FLOAT;
86SELECT * FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t2';
87TABLE_SCHEMA TABLE_NAME ROWS_READ ROWS_CHANGED ROWS_CHANGED_X_INDEXES
88DROP TABLE t2;
18SET GLOBAL userstat= @userstat_old;89SET GLOBAL userstat= @userstat_old;
19DROP TABLE t1;
2090
=== removed file 'Percona-Server/mysql-test/r/percona_xtradb_admin_command.result'
--- Percona-Server/mysql-test/r/percona_xtradb_admin_command.result 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/r/percona_xtradb_admin_command.result 1970-01-01 00:00:00 +0000
@@ -1,6 +0,0 @@
1select * from information_schema.XTRADB_ADMIN_COMMAND;
2result_message
3No XTRA_* command in the SQL statement. Please add /*!XTRA_xxxx*/ to the SQL.
4select * from information_schema.XTRADB_ADMIN_COMMAND /*!XTRA_HELLO*/;
5result_message
6Hello!
70
=== added file 'Percona-Server/mysql-test/r/percona_xtradb_bug317074.result'
--- Percona-Server/mysql-test/r/percona_xtradb_bug317074.result 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/r/percona_xtradb_bug317074.result 2013-05-28 06:50:46 +0000
@@ -0,0 +1,5 @@
1SET @old_innodb_file_format=@@innodb_file_format;
2SET @old_innodb_file_format_max=@@innodb_file_format_max;
3SET @old_innodb_file_per_table=@@innodb_file_per_table;
4SET GLOBAL innodb_file_format='Barracuda';
5SET GLOBAL innodb_file_per_table=ON;
06
=== added file 'Percona-Server/mysql-test/t/percona_bug643149.test'
--- Percona-Server/mysql-test/t/percona_bug643149.test 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_bug643149.test 2013-05-28 06:50:46 +0000
@@ -0,0 +1,49 @@
1#
2# This test suffers from server
3# Bug#38124 "general_log_file" variable silently unset when using expression
4# In short:
5# SET GLOBAL general_log_file = @<whatever>
6# SET GLOBAL slow_query_log = @<whatever>
7# cause that the value of these server system variables is set to default
8# instead of the assigned values. There comes no error message or warning.
9# If this bug is fixed please
10# 1. try this test with "let $fixed_bug38124 = 0;"
11# 2. remove all workarounds if 1. was successful.
12--source include/have_profiling.inc
13let $fixed_bug38124 = 0;
14
15SET @old_slow_query_log_file=@@global.slow_query_log_file;
16SET GLOBAL slow_query_log=on;
17SET LOCAL log_slow_verbosity='profiling';
18SET LOCAL long_query_time=0;
19
20let slogfile=$MYSQLTEST_VARDIR/percona_bug643149_slow.log;
21--replace_result $MYSQLTEST_VARDIR MYSQLTEST_VARDIR
22--eval SET GLOBAL slow_query_log_file='$slogfile';
23
24SELECT 1;
25
26perl;
27 $slogfile= $ENV{'slogfile'};
28
29 open(FILE, "$slogfile") or
30 die("Unable to read slow query log file $slogfile: $!\n");
31 while(<FILE>) {
32 next if (!/^#/);
33 next if (/^# Time:/);
34 s/[0-9]+/X/g;
35 print;
36 }
37
38 close(FILE);
39EOF
40
41SET GLOBAL slow_query_log_file=@old_slow_query_log_file;
42
43if(!$fixed_bug38124)
44{
45 --disable_query_log
46 let $my_var = `SELECT @old_slow_query_log_file`;
47 eval SET @@global.slow_query_log_file = '$my_var';
48 --enable_query_log
49}
050
=== removed file 'Percona-Server/mysql-test/t/percona_bug933969.test'
--- Percona-Server/mysql-test/t/percona_bug933969.test 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/t/percona_bug933969.test 1970-01-01 00:00:00 +0000
@@ -1,42 +0,0 @@
1###################### percona_bug933969.test ########################
2# Bug #933969: mysqlbinlog doesn't accept stdin #
3# #
4# The goal of this testcase is to test that mysqlbinlog handle #
5# stdin correctly when stdin is pipe. #
6# i.e. "cat log | mysqlbinlog -" don't cause mysqlbinlog failure #
7######################################################################
8-- source include/have_log_bin.inc
9-- source include/not_windows.inc
10-- source include/not_embedded.inc
11
12# deletes all the binary logs
13RESET MASTER;
14
15--disable_warnings
16DROP TABLE IF EXISTS t1;
17--enable_warnings
18
19# produce some statements for binlog
20
21CREATE TABLE t1 (word VARCHAR(20));
22
23INSERT INTO t1 VALUES ("hamite");
24INSERT INTO t1 VALUES ("hoho");
25INSERT INTO t1 VALUES ("znamenito");
26INSERT INTO t1 VALUES ("mrachny");
27INSERT INTO t1 VALUES ("mrak");
28INSERT INTO t1 VALUES ("zhut");
29INSERT INTO t1 VALUES ("parnisha");
30INSERT INTO t1 VALUES ("krrasota!");
31INSERT INTO t1 VALUES ("podumayesh");
32INSERT INTO t1 VALUES ("ogo!");
33
34FLUSH LOGS;
35
36# run mysqlbinlog and make sure it ends normally
37
38let $MYSQLD_DATADIR= `SELECT @@datadir`;
39--system cat $MYSQLD_DATADIR/master-bin.000001 | $MYSQL_BINLOG - >/dev/null
40
41DROP TABLE t1;
42RESET MASTER;
430
=== added file 'Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl.test'
--- Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl.test 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_log_slow_verbosity-cl.test 2013-05-28 06:50:46 +0000
@@ -0,0 +1,2 @@
1SHOW VARIABLES LIKE 'log_slow_verbosity';
2SHOW GLOBAL VARIABLES LIKE 'log_slow_verbosity';
03
=== added file 'Percona-Server/mysql-test/t/percona_query_cache_with_comments_prepared_statements.test'
--- Percona-Server/mysql-test/t/percona_query_cache_with_comments_prepared_statements.test 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_query_cache_with_comments_prepared_statements.test 2013-05-28 06:50:46 +0000
@@ -0,0 +1,208 @@
1-- source include/have_query_cache.inc
2
3set GLOBAL query_cache_size=1355776;
4
5# Reset query cache variables.
6flush query cache; # This crashed in some versions
7flush query cache; # This crashed in some versions
8reset query cache;
9flush status;
10--disable_warnings
11drop table if exists t1;
12--enable_warnings
13
14#
15# First simple test
16#
17
18create table t1 (a int not null);
19insert into t1 values (1),(2),(3);
20
21set global query_cache_strip_comments=ON;
22
23show status like "Qcache_queries_in_cache";
24show status like "Qcache_inserts";
25show status like "Qcache_hits";
26
27prepare stmt from '/* with comment */ select * from t1';
28execute stmt;
29
30show status like "Qcache_queries_in_cache";
31show status like "Qcache_inserts";
32show status like "Qcache_hits";
33
34execute stmt;
35execute stmt;
36execute stmt;
37execute stmt;
38execute stmt;
39
40show status like "Qcache_queries_in_cache";
41show status like "Qcache_inserts";
42show status like "Qcache_hits";
43
44prepare stmt from 'select * from t1';
45execute stmt;
46
47show status like "Qcache_queries_in_cache";
48show status like "Qcache_inserts";
49show status like "Qcache_hits";
50
51prepare stmt from 'select * /*internal comment*/from t1';
52execute stmt;
53
54show status like "Qcache_queries_in_cache";
55show status like "Qcache_inserts";
56show status like "Qcache_hits";
57
58prepare stmt from 'select * /*internal comment*/ from t1';
59execute stmt;
60
61show status like "Qcache_queries_in_cache";
62show status like "Qcache_inserts";
63show status like "Qcache_hits";
64
65prepare stmt from 'select * from t1 /* at the end */';
66execute stmt;
67
68show status like "Qcache_queries_in_cache";
69show status like "Qcache_inserts";
70show status like "Qcache_hits";
71
72prepare stmt from 'select * from t1 /* with "quote" */';
73execute stmt;
74
75show status like "Qcache_queries_in_cache";
76show status like "Qcache_inserts";
77show status like "Qcache_hits";
78
79prepare stmt from 'select * from t1 /* with \'quote\' */';
80execute stmt;
81
82show status like "Qcache_queries_in_cache";
83show status like "Qcache_inserts";
84show status like "Qcache_hits";
85
86prepare stmt from 'select * from t1 # 123
87';
88execute stmt;
89
90show status like "Qcache_queries_in_cache";
91show status like "Qcache_inserts";
92show status like "Qcache_hits";
93
94prepare stmt from 'select * from t1 # 123 with "quote"
95';
96execute stmt;
97
98show status like "Qcache_queries_in_cache";
99show status like "Qcache_inserts";
100show status like "Qcache_hits";
101
102prepare stmt from 'select * from t1 # 123 with \'quote\'
103';
104execute stmt;
105
106show status like "Qcache_queries_in_cache";
107show status like "Qcache_inserts";
108show status like "Qcache_hits";
109
110prepare stmt from 'select * from t1
111# 123
112';
113execute stmt;
114
115show status like "Qcache_queries_in_cache";
116show status like "Qcache_inserts";
117show status like "Qcache_hits";
118
119prepare stmt from '#456
120select * from t1
121# 123
122';
123execute stmt;
124
125show status like "Qcache_queries_in_cache";
126show status like "Qcache_inserts";
127show status like "Qcache_hits";
128
129prepare stmt from 'select * from t1 -- 123
130';
131execute stmt;
132
133show status like "Qcache_queries_in_cache";
134show status like "Qcache_inserts";
135show status like "Qcache_hits";
136
137prepare stmt from 'select * from t1
138-- 123
139';
140execute stmt;
141
142show status like "Qcache_queries_in_cache";
143show status like "Qcache_inserts";
144show status like "Qcache_hits";
145
146prepare stmt from '-- comment in first
147select * from t1
148# 123
149';
150execute stmt;
151
152show status like "Qcache_queries_in_cache";
153show status like "Qcache_inserts";
154show status like "Qcache_hits";
155
156prepare stmt from '(#456(
157select * from t1
158# 123(
159)';
160execute stmt;
161
162show status like "Qcache_queries_in_cache";
163show status like "Qcache_inserts";
164show status like "Qcache_hits";
165
166prepare stmt from '/*test*/(-- comment in first(
167select * from t1
168-- 123 asdasd
169/* test */)';
170execute stmt;
171
172show status like "Qcache_queries_in_cache";
173show status like "Qcache_inserts";
174show status like "Qcache_hits";
175
176prepare stmt from 'select "test",a from t1';
177execute stmt;
178execute stmt;
179
180show status like "Qcache_queries_in_cache";
181show status like "Qcache_inserts";
182show status like "Qcache_hits";
183
184prepare stmt from 'select "test /* internal \'comment\' */",a from t1';
185execute stmt;
186
187show status like "Qcache_queries_in_cache";
188show status like "Qcache_inserts";
189show status like "Qcache_hits";
190
191prepare stmt from 'select "test #internal comment" ,a from t1';
192execute stmt;
193
194show status like "Qcache_queries_in_cache";
195show status like "Qcache_inserts";
196show status like "Qcache_hits";
197
198prepare stmt from 'select "test #internal comment" #external comment
199,a from t1';
200execute stmt;
201
202show status like "Qcache_queries_in_cache";
203show status like "Qcache_inserts";
204show status like "Qcache_hits";
205
206DROP TABLE t1;
207SET GLOBAL query_cache_size= default;
208set global query_cache_strip_comments=OFF;
0209
=== removed file 'Percona-Server/mysql-test/t/percona_server_variables_debug.test'
--- Percona-Server/mysql-test/t/percona_server_variables_debug.test 2013-05-28 06:50:44 +0000
+++ Percona-Server/mysql-test/t/percona_server_variables_debug.test 1970-01-01 00:00:00 +0000
@@ -1,2 +0,0 @@
1--source include/have_debug.inc
2--source include/percona_server_variables.inc
30
=== added file 'Percona-Server/mysql-test/t/percona_server_variables_release.test'
--- Percona-Server/mysql-test/t/percona_server_variables_release.test 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_server_variables_release.test 2013-05-28 06:50:46 +0000
@@ -0,0 +1,2 @@
1--source include/have_nodebug.inc
2--source include/percona_server_variables.inc
03
=== added file 'Percona-Server/mysql-test/t/percona_sql_no_fcache.test'
--- Percona-Server/mysql-test/t/percona_sql_no_fcache.test 1970-01-01 00:00:00 +0000
+++ Percona-Server/mysql-test/t/percona_sql_no_fcache.test 2013-05-28 06:50:46 +0000
@@ -0,0 +1,11 @@
1--disable_warnings
2drop table if exists t1;
3--enable_warnings
4
5create table t (a int not null);
6insert into t values (1),(2),(3);
7
8SELECT SQL_NO_FCACHE SLEEP(0);
9SELECT /*!40001 SQL_NO_CACHE */ /*!50084 SQL_NO_FCACHE */ * FROM t;
10
11DROP TABLE t;
012
=== renamed file 'Percona-Server/mysql-test/t/userstat_bug602047.test' => 'Percona-Server/mysql-test/t/percona_userstat.test'
--- Percona-Server/mysql-test/t/userstat_bug602047.test 2012-12-19 09:23:15 +0000
+++ Percona-Server/mysql-test/t/percona_userstat.test 2013-05-28 06:50:46 +0000
@@ -1,16 +1,91 @@
1--source include/have_innodb.inc1--source include/have_innodb.inc
2
2--disable_warnings3--disable_warnings
3DROP TABLE IF EXISTS t1; 4DROP TABLE IF EXISTS t1;
4--enable_warnings5--enable_warnings
6
7# Test that FLUSH works with userstat disabled
8SET GLOBAL userstat=OFF;
9
10FLUSH CLIENT_STATISTICS;
11FLUSH INDEX_STATISTICS;
12FLUSH TABLE_STATISTICS;
13FLUSH THREAD_STATISTICS;
5FLUSH USER_STATISTICS;14FLUSH USER_STATISTICS;
6FLUSH TABLE_STATISTICS;15
7FLUSH INDEX_STATISTICS;16# Test that I_S and SHOW queries work with userstat disabled
17SELECT * FROM INFORMATION_SCHEMA.CLIENT_STATISTICS;
18SELECT * FROM INFORMATION_SCHEMA.INDEX_STATISTICS;
19SELECT * FROM INFORMATION_SCHEMA.TABLE_STATISTICS;
20SELECT * FROM INFORMATION_SCHEMA.THREAD_STATISTICS;
21SELECT * FROM INFORMATION_SCHEMA.USER_STATISTICS;
22
23SHOW CLIENT_STATISTICS;
24SHOW INDEX_STATISTICS;
25SHOW TABLE_STATISTICS;
26SHOW THREAD_STATISTICS;
27SHOW USER_STATISTICS;
28
8SET @userstat_old= @@userstat;29SET @userstat_old= @@userstat;
9SET GLOBAL userstat=ON;30SET GLOBAL userstat=ON;
10CREATE TABLE t1 ( id int(10), PRIMARY KEY (id)) ENGINE=InnoDB;31
32# Test that statistics start at empty state
33
34--replace_column 3 CONNECTED_TIME 4 BUSY_TIME 5 CPU_TIME
35SELECT * FROM INFORMATION_SCHEMA.CLIENT_STATISTICS;
36SELECT * FROM INFORMATION_SCHEMA.INDEX_STATISTICS;
37SELECT * FROM INFORMATION_SCHEMA.TABLE_STATISTICS;
38--replace_column 3 CONNECTED_TIME 4 BUSY_TIME 5 CPU_TIME
39SELECT * FROM INFORMATION_SCHEMA.THREAD_STATISTICS;
40--replace_column 3 CONNECTED_TIME 4 BUSY_TIME 5 CPU_TIME
41SELECT * FROM INFORMATION_SCHEMA.USER_STATISTICS;
42
43--replace_column 3 CONNECTED_TIME 4 BUSY_TIME 5 CPU_TIME
44SHOW CLIENT_STATISTICS;
45SHOW INDEX_STATISTICS;
46SHOW TABLE_STATISTICS;
47--replace_column 3 CONNECTED_TIME 4 BUSY_TIME 5 CPU_TIME
48SHOW THREAD_STATISTICS;
49--replace_column 3 CONNECTED_TIME 4 BUSY_TIME 5 CPU_TIME
50SHOW USER_STATISTICS;
51
52# Bug 602047 (wrong rows_read value)
53
54CREATE TABLE t1 (id int(10), PRIMARY KEY (id)) ENGINE=InnoDB;
11INSERT INTO t1 VALUES (1),(2),(3),(4),(5),(6),(7),(8),(9),(10);55INSERT INTO t1 VALUES (1),(2),(3),(4),(5),(6),(7),(8),(9),(10);
12SELECT COUNT(*) FROM t1; 56SELECT COUNT(*) FROM t1;
13SELECT ROWS_READ FROM information_schema.table_statistics WHERE TABLE_NAME='t1';57SELECT ROWS_READ FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t1';
14SELECT ROWS_READ FROM information_schema.index_statistics WHERE TABLE_NAME='t1';58SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
59
60# Test that FLUSH clears one table but not another
61
62FLUSH TABLE_STATISTICS;
63
64SELECT ROWS_READ FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t1';
65SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
66
67# Test that FLUSH clears both tables now
68
69FLUSH INDEX_STATISTICS;
70
71SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
72
73# Test that stats are collected after the FLUSH again
74
75SELECT COUNT(*) FROM t1;
76SELECT ROWS_READ FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t1';
77SELECT ROWS_READ FROM INFORMATION_SCHEMA.INDEX_STATISTICS WHERE TABLE_NAME='t1';
78
79DROP TABLE t1;
80
81# Bug 1183625 (handler::update_global_table_stats crash).
82
83CREATE TABLE t2 (c1 INT UNSIGNED) ENGINE=InnoDB;
84
85ALTER TABLE t2 MODIFY c1 FLOAT;
86
87SELECT * FROM INFORMATION_SCHEMA.TABLE_STATISTICS WHERE TABLE_NAME='t2';
88
89DROP TABLE t2;
90
15SET GLOBAL userstat= @userstat_old;91SET GLOBAL userstat= @userstat_old;
16DROP TABLE t1;
1792
=== removed file 'build/debian/copyright'
--- build/debian/copyright 2013-05-28 06:50:44 +0000
+++ build/debian/copyright 1970-01-01 00:00:00 +0000
@@ -1,86 +0,0 @@
1This work was packaged for Debian by:
2
3 Aleksandr Kuzminsky <aleksandr.kuzminsky@percona.com> on Tue, 11 Jan 2011 07:17:08 -0800
4
5It was downloaded from:
6
7 http://www.percona.com/downloads/
8
9Upstream Author(s):
10
11 mysql-dev@percona.com
12
13Copyright:
14
15 Copyright (C) 2006-2011 Percona Inc.
16
17License:
18
19 This package is free software; you can redistribute it and/or modify
20 it under the terms of the GNU General Public License version 2 as
21 published by the Free Software Foundation.
22
23 This package is distributed in the hope that it will be useful,
24 but WITHOUT ANY WARRANTY; without even the implied warranty of
25 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
26 GNU General Public License for more details.
27
28 You should have received a copy of the GNU General Public License
29 along with this program. If not, see <http://www.gnu.org/licenses/>
30
31On Debian systems, the complete text of the GNU General
32Public License version 2 can be found in "/usr/share/common-licenses/GPL-2".
33
34The Debian packaging is:
35
36 Copyright (C) 2011 Aleksandr Kuzminsky <aleksandr.kuzminsky@percona.com>
37
38you can redistribute it and/or modify
39it under the terms of the GNU General Public License as published by
40the Free Software Foundation; either version 2 of the License, or
41(at your option) any later version.
42
43Other copyrights:
44
45Patch innodb-deadlock-count-patch
46
47Copyright:
48
49 COPYING.innodb-deadlock-count-patch
50
51Patch show_temp
52
53Copyright:
54
55 COPYING.show_temp_51
56
57== innotop ==
58
59Author: Baron Schwartz <baron@xaprb.com>
60URL: http://innotop.sourceforge.net
61
62License:
63> This software is dual licensed, either GPL version 2 or Artistic License.
64>
65> This package is free software; you can redistribute it and/or modify
66> it under the terms of the GNU General Public License as published by
67> the Free Software Foundation; either version 2 of the License, or
68> (at your option) any later version.
69>
70> This package is distributed in the hope that it will be useful,
71> but WITHOUT ANY WARRANTY; without even the implied warranty of
72> MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
73> GNU General Public License for more details.
74>
75> You should have received a copy of the GNU General Public License
76> along with this package; if not, write to the Free Software
77> Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
78
79On Debian systems, the complete text of the GNU General Public License and the
80Artistic License can be found in `/usr/share/common-licenses/'.
81
82The upstream author explained here: http://bugs.gentoo.org/show_bug.cgi?id=14760
83that these licenses also apply to the following files:
84- innotop.html
85- InnoDBParser.pm
86
870
=== added directory 'doc/source/_static'
=== added file 'doc/source/compatibility.rst'
--- doc/source/compatibility.rst 1970-01-01 00:00:00 +0000
+++ doc/source/compatibility.rst 2013-05-28 06:50:46 +0000
@@ -0,0 +1,27 @@
1.. _compatibility:
2
3==============================================================
4Options that make XtraDB tablespaces not compatible with MySQL
5==============================================================
6
7Fast checksums
8==============
9
10Enabling :variable:`innodb_fast_checksum` will use a more CPU-efficient algorithm, based on 4-byte words, which can be beneficial for some workloads. Once enabled, turning it off will require the table to be dumped/imported again, since |Percona Server| will fail to start on data files created when :variable:`innodb_fast_checksum` was enabled.
11
12In case you've migrated from |Percona Server| to |MySQL| you could get the "corrupted checksum" error message. In order to recover that table you'll need to:
13
14 1) Reinstall Percona Server to read your tables that were created with fast checksums.
15 2) Dump the tables (or temporarily convert them to MyISAM).
16 3) Install stock MySQL (or at least disable fast checksums).
17 4) Restore the InnoDB tables (or convert back from MyISAM).
18
19Page sizes other than 16KiB
20===========================
21
22This is controlled by variable :variable:`innodb_page_size`. Changing the page size for an existing database is not supported. The table will need to be dumped/imported again if compatibility with |MySQL| is required.
23
24Relocation of the doublewrite buffer
25====================================
26
27Variable :variable:`innodb_doublewrite_file` provides an option to put the buffer on a dedicated disk in order to parallelize I/O activity on the buffer and on the tablespace. The only case in which this variable cannot be changed is during crash recovery; in all other cases it can be turned on/off without breaking compatibility.
028
=== added file 'doc/source/diagnostics/innodb_stats.rst'
--- doc/source/diagnostics/innodb_stats.rst 1970-01-01 00:00:00 +0000
+++ doc/source/diagnostics/innodb_stats.rst 2013-05-28 06:50:46 +0000
@@ -0,0 +1,234 @@
1.. _innodb_stats:
2
3=====================
4 |InnoDB| Statistics
5=====================
6
7This feature provides new startup options (control method and collection of index statistics estimation) and information schema views to confirm the statistics.
8
9Version Specific Information
10============================
11
12 * :rn:`5.5.8-20.0`:
13 Renamed three fields in :table:`INNODB_INDEX_STATS` table.
14
15
16System Variables
17================
18
19Four new system variables were introduced by this feature.
20
21.. variable:: innodb_stats_method
22
23 :cli: YES
24 :configfile: YES
25 :scope: GLOBAL
26 :dyn: YES
27 :type: STRING
28 :default: ``nulls_equal``
29 :allowed: ``nulls_equal``, ``nulls_unequal``, ``nulls_ignored``
30
31The values and meanings are almost the same as those of the ``myisam_stats_method`` option of native |MySQL| (``nulls_equal``, ``nulls_unequal``, ``nulls_ignored``), but |InnoDB| does not currently have several patterns of statistics. Even though this option can be changed dynamically, the statistics need to be re-calculated for the new method to take effect for a table.
32
33(reference: `MyISAM Index Statistics Collection <http://dev.mysql.com/doc/refman/5.5/en/myisam-index-statistics.html>`_)
34
35.. variable:: innodb_stats_auto_update
36
37 :type: BOOLEAN
38 :default: 1
39
40|InnoDB| updates each index's statistics automatically (e.g. when many updates have been done, when some information_schema tables are accessed, by the table monitor, etc.). Setting this option to 0 stops this automatic recalculation of the statistics, except on "first open" and for the "ANALYZE TABLE command".
41
42
43.. variable:: innodb_stats_update_need_lock
44
45 :type: BOOLEAN
46 :default: 1
47
48If you encounter contention on ``&dict_operation_lock``, setting this option to 0 reduces the contention, but 0 also disables updating ``Data_free:`` of ``SHOW TABLE STATUS``.
49
50
51.. variable:: innodb_use_sys_stats_table
52
53 :type: BOOLEAN
54 :default: 0
55
56
57If this option is enabled, |XtraDB| uses the ``SYS_STATS`` system table to store statistics of table indexes. Also, when |InnoDB| opens a table for the first time, it loads the statistics from ``SYS_STATS`` instead of sampling index pages. If you use a high ``stats_sample_pages`` value, the first open of a table is expensive. In such a case, this option will help. Intended behavior is to never update statistics unless an explicit ``ANALYZE TABLE`` is issued.
58
59INFORMATION_SCHEMA Tables
60=========================
61
62.. table:: INFORMATION_SCHEMA.INNODB_SYS_STATS
63
64 Shows statistics of table indexes.
65
66 :column INDEX_ID: Index ID
67 :column KEY_COLS: Number of key columns
68 :column DIFF_VALS: Number of Different Values
69 :column NON_NULL_VALS: Number of Non ``NULL`` Values
70
71.. table:: INFORMATION_SCHEMA.INNODB_SYS_TABLES
72
73 Shows the information about |InnoDB| tables
74
75 :column TABLE_ID: Table ID
76 :column SCHEMA: Database (schema) name
77 :column NAME: Table name
78 :column FLAG: Contains `0` if it is a InnoDB system table or `1` it is a user table
79 :column N_COLS: Number of columns in the table
80 :column SPACE: Tablespace ID
81
82.. table:: INFORMATION_SCHEMA.INNODB_SYS_TABLESTATS
83
84 Shows the information about the performance statistics of |InnoDB| tables.
85
86 :column TABLE_ID: Table ID
87 :column SCHEMA: Database (schema) Name
88 :column NAME: Table Name
89 :column STATS_INITIALIZED: Contains ``Initialized`` value if the statistics are collected or ``Uninitialized`` if they are not collected.
90 :column NUM_ROWS: Estimated number of rows in the table.
91 :column CLUST_INDEX_SIZE: Number of pages on disk that store the clustered index.
92 :column OTHER_INDEX_SIZE: Number of pages on disk that store all secondary indexes.
93 :column MODIFIED_COUNTER: Number of rows modified by DML operations.
94 :column AUTOINC:
95 :column MYSQL_HANDLES_OPENED:
96
97.. table:: INFORMATION_SCHEMA.INNODB_SYS_INDEXES
98
99 Shows the information about |InnoDB| indexes
100
101 :column INDEX_ID: Index ID
102 :column NAME: Index Name
103 :column TABLE_ID: Table ID
104 :column TYPE: Numeric identifier signifying the index type
105 :column N_FIELDS: Number of columns in the index
106 :column PAGE_NO: Page offset within its tablespace
107 :column SPACE: Tablespace ID
108
109.. table:: INFORMATION_SCHEMA.INNODB_SYS_COLUMNS
110
111 Shows the information about the |InnoDB| table columns
112
113 :column TABLE_ID: Table ID
114 :column NAME: Column Name
115 :column POS: Position of the column inside the table.
116 :column MTYPE: Numeric identifier for the column type.
117 :column PRTYPE: Binary value with bits representing data type, character set code and nullability.
118 :column LEN: Column length.
119
120.. table:: INFORMATION_SCHEMA.INNODB_SYS_FIELDS
121
122 Shows the information about the |InnoDB| index key fields.
123
124 :column INDEX_ID: Index ID
125 :column NAME: Index Name
126 :column POS: Position of the field inside the index.
127
128.. table:: INFORMATION_SCHEMA.INNODB_SYS_FOREIGN
129
130 Shows the information about the |InnoDB| foreign keys.
131
132 :column ID: Foreign Key ID
133 :column FOR_NAME: Database/Table which contains the Foreign Key
134 :column FOR_REF: Database/Table being referenced by the Foreign Key
135 :column N_COLS: Number of columns in the foreign key.
136 :column TYPE: Type of foreign key, represented by the bit flags.
137
138.. table:: INFORMATION_SCHEMA.INNODB_SYS_FOREIGN_COLS
139
140 Shows the information about the columns of the |InnoDB| foreign keys.
141
142 :column ID: Foreign Key ID
143 :column FOR_COL_NAME: Foreign Key Column Name
144 :column FOR_REF: Referenced Column Name
145 :column POS: Position of the field inside the index.
146
147.. table:: INFORMATION_SCHEMA.INNODB_TABLE_STATS
148
149 Shows table statistics information of dictionary cached.
150
151 :column table_schema: Database name of the table.
152 :column table_name: Table name.
153 :column rows: estimated number of all rows.
154 :column clust_size: cluster index (table/primary key) size in number of pages.
155 :column other_size: Other index (non primary key) size in number of pages.
156 :column modified: Internal counter to judge whether statistics recalculation should be done.
157
158If the value of the ``modified`` column exceeds "rows / 16" or 2000000000, the statistics recalculation is done when ``innodb_stats_auto_update == 1``. We can estimate the staleness of the statistics from this value.
159
160.. table:: INFORMATION_SCHEMA.INNODB_INDEX_STATS
161
162 Shows index statistics information of dictionary cached.
163
164 :column table_schema: Database name of the table.
165 :column table_name: Table name.
166 :column index_name: Index name.
167 :column fields: How many fields the index key has. (it is internal structure of |InnoDB|, it may be larger than the ``CREATE TABLE``).
168 :column rows_per_key: Estimated number of rows per key value. ([1 column value], [2 columns value], [3 columns value], ...).
169 :column index_total_pages: Number of index pages.
170 :column index_leaf_pages: Number of leaf pages.
171
172In releases before 5.5.8-20.0, these fields had different names:
173
174 * ``rows_per_key`` was ``row_per_keys``
175
176 * ``index_total_pages`` was ``index_size``
177
178 * ``index_leaf_pages`` was ``leaf_pages``
179
180Example
181=======
182
183``[innodb_stats_method = nulls_equal (default behavior of InnoDB)]`` ::
184
185 mysql> explain SELECT COUNT(*), 0 FROM orgs2 orgs LEFT JOIN sa_opportunities2 sa_opportunities ON orgs.org_id=sa_opportunities.org_id LEFT JOIN contacts2 contacts ON orgs.org_id=contacts.org_id;
186 +----+-------------+------------------+-------+-----------------+-----------------+---------+-------------------+-------+-------------+
187 | id | select_type | table | type | possible_keys | key | key_len | ref | rows | Extra |
188 +----+-------------+------------------+-------+-----------------+-----------------+---------+-------------------+-------+-------------+
189 | 1 | SIMPLE | orgs | index | NULL | orgs$org_id | 4 | NULL | 128 | Using index |
190 | 1 | SIMPLE | sa_opportunities | ref | sa_opp$org_id | sa_opp$org_id | 5 | test2.orgs.org_id | 5751 | Using index |
191 | 1 | SIMPLE | contacts | ref | contacts$org_id | contacts$org_id | 5 | test2.orgs.org_id | 23756 | Using index |
192 +----+-------------+------------------+-------+-----------------+-----------------+---------+-------------------+-------+-------------+
193 3 rows in set (0.00 sec)
194
195``[innodb_stats_method = nulls_unequal or nulls_ignored]`` ::
196
197 mysql> explain SELECT COUNT(*), 0 FROM orgs2 orgs LEFT JOIN sa_opportunities2 sa_opportunities ON orgs.org_id=sa_opportunities.org_id LEFT JOIN contacts2 contacts ON orgs.org_id=contacts.org_id;
198 +----+-------------+------------------+-------+-----------------+-----------------+---------+-------------------+------+-------------+
199 | id | select_type | table | type | possible_keys | key | key_len | ref | rows | Extra |
200 +----+-------------+------------------+-------+-----------------+-----------------+---------+-------------------+------+-------------+
201 | 1 | SIMPLE | orgs | index | NULL | orgs$org_id | 4 | NULL | 128 | Using index |
202 | 1 | SIMPLE | sa_opportunities | ref | sa_opp$org_id | sa_opp$org_id | 5 | test2.orgs.org_id | 1 | Using index |
203 | 1 | SIMPLE | contacts | ref | contacts$org_id | contacts$org_id | 5 | test2.orgs.org_id | 1 | Using index |
204 +----+-------------+------------------+-------+-----------------+-----------------+---------+-------------------+------+-------------+
205 3 rows in set (0.00 sec)
206 <example of information_schema>
207
208 mysql> select * from information_schema.innodb_table_stats;
209 +------------------------+-------+------------+------------+----------+
210 | table_name | rows | clust_size | other_size | modified |
211 +------------------------+-------+------------+------------+----------+
212 | test/sa_opportunities2 | 11175 | 21 | 11 | 0 |
213 | test/orgs2 | 128 | 1 | 0 | 0 |
214 | test/contacts2 | 47021 | 97 | 97 | 0 |
215 +------------------------+-------+------------+------------+----------+
216 3 rows in set (0.00 sec)
217
218 mysql> select * from information_schema.innodb_index_stats;
219 +------------------------+-----------------+--------+--------------+------------+------------+
220 | table_name | index_name | fields | row_per_keys | index_size | leaf_pages |
221 +------------------------+-----------------+--------+--------------+------------+------------+
222 | test/sa_opportunities2 | GEN_CLUST_INDEX | 1 | 1 | 21 | 20 |
223 | test/sa_opportunities2 | sa_opp$org_id | 2 | 338, 1 | 11| 10 |
224 | test/orgs2 | orgs$org_id | 1 | 1 | 1 | 1 |
225 | test/contacts2 | GEN_CLUST_INDEX | 1 | 1 | 97 | 80 |
226 | test/contacts2 | contacts$org_id | 2 | 516, 0 | 97 | 37 |
227 +------------------------+-----------------+--------+--------------+------------+------------+
228 5 rows in set (0.00 sec)
229
230Other reading
231=============
232
233 * `InnoDB Table/Index stats <http://www.mysqlperformanceblog.com/2010/03/20/InnoDB-tableindex-stats/>`_
234
0235
=== removed file 'doc/source/flexibility/innodb_fast_shutdown.rst'
--- doc/source/flexibility/innodb_fast_shutdown.rst 2013-05-28 06:50:44 +0000
+++ doc/source/flexibility/innodb_fast_shutdown.rst 1970-01-01 00:00:00 +0000
@@ -1,36 +0,0 @@
1.. _innodb_fast_shutdown:
2
3===============
4 Fast Shutdown
5===============
6
7Some |InnoDB| / |XtraDB| threads which perform various background activities are in the sleep state most of the time. They only wake up every few seconds to perform their tasks. They also check whether the server is in the shutdown phase, and if not, they go to the sleep state again. That means there could be a noticeable delay (up to 10 seconds) after a shutdown command and before all |InnoDB| / |XtraDB| threads actually notice this and terminate. This is not a big problem for most production servers, because a shutdown of a heavily loaded server normally takes much longer than 10 seconds.
8
9The problem, however, had a significant impact on running the regression test suite, because it performs a lot of server restarts during its execution and also because there is not so much to do when shutting a test server. So it makes even less sense to wait up to 10 seconds.
10
11This change modifies that behavior to make the sleep waiting interruptible, so that when the server is told to shutdown, threads no longer wait until the end of their sleep interval. This results in a measurably faster test suite execution (~40% in some cases).
12
13The change was contributed by Kristian Nielsen.
14
15Version Specific Information
16============================
17
18 * :rn:`5.5.8-20.0`
19 Full functionality available.
20
21Other Information
22=================
23
24 * Author / Origin:
25 Kristian Nielsen
26
27 * Bugs fixed:
28 :bug:`643463`
29
30Other reading
31=============
32
33 * `How to decrease InnoDB shutdown times <http://www.mysqlperformanceblog.com/2009/04/15/how-to-decrease-innodb-shutdown-times/>`_
34
35 * `How long InnoDB shutdown may take <http://www.mysqlperformanceblog.com/2010/09/02/how-long-innodb-shutdown-may-take/>`_
36
370
=== removed file 'doc/source/management/innodb_expand_import.rst'
--- doc/source/management/innodb_expand_import.rst 2013-05-28 06:50:44 +0000
+++ doc/source/management/innodb_expand_import.rst 1970-01-01 00:00:00 +0000
@@ -1,160 +0,0 @@
1.. _innodb_expand_import_page:
2
3===================
4Expand Table Import
5===================
6
7Unlike MyISAM, |InnoDB| does not allow users to copy datafiles for a single table between servers. If exported with XtraBackup, a table can now be imported on another server running |XtraDB|.
8
9This feature implements the abililty to import arbitrary .ibd files exported using the XtraBackup ``--export`` option. The :variable:`innodb_expand_import` variable makes to convert ``.ibd`` file during import process.
10
11The normal version can import only the backed-up .ibd file at the same place.
12
13.. note::
14
15 This feature is unsupported with InnoDB data files created with MySQL 5.0 and MySQL 5.1 prior to version 5.1.7 due to InnoDB file format limitation. It may work in some cases, but may result in crashes on import as well, see bug :bug:`1000221` and bug :bug:`727704` for examples and details.
16
17|Percona Server| :rn:`5.5.28-29.2` extended the ``innochecksum`` with an option :option:`-f` to read the file format information from a given |InnoDB| data file. As only the first page needs to be read to detect the format/version information, it can also be used on a running server. Example of the output should look like this: ::
18
19 $ innochecksum -f ibdata1
20 Detected file format: Antelope (5.1.7 or newer).
21
22Example
23=======
24
25Assuming that:
26
27 * :variable:`innodb_expand_import` is set to ``1``.
28
29 * the files (``.ibd`` and ``.exp``) are prepared by the ``xtrabackup --prepare --export`` command.
30
31First create “exactly same” structured tables to the target database.
32
33Then discard the tables as preparation of import, for example, ::
34
35 mysql> set FOREIGN_KEY_CHECKS=0;
36 Query OK, 0 rows affected (0.00 sec)
37
38 mysql> alter table customer discard tablespace;
39 Query OK, 0 rows affected (0.01 sec)
40
41 mysql> alter table district discard tablespace;
42 Query OK, 0 rows affected (0.01 sec)
43
44 mysql> alter table history discard tablespace;
45 Query OK, 0 rows affected (0.00 sec)
46
47 ...
48 put the .ibd and .exp files at the same place to .frm file.
49 import the tables
50 (command example)
51 mysql> set FOREIGN_KEY_CHECKS=0;
52 Query OK, 0 rows affected (0.00 sec)
53
54 mysql> set global innodb_expand_import=1;
55 Query OK, 0 rows affected (0.00 sec)
56
57 mysql> alter table customer import tablespace;
58 Query OK, 0 rows affected (0.17 sec)
59
60 mysql> alter table district import tablespace;
61 Query OK, 0 rows affected (0.00 sec)
62
63 mysql> alter table history import tablespace;
64 Query OK, 0 rows affected (0.04 sec)
65
66 ...
67 (.err file example)
68 InnoDB: import: extended import of tpcc2/customer is started.
69 InnoDB: import: 2 indexes are detected.
70 InnoDB: Progress in %: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 done.
71 InnoDB: import: extended import of tpcc2/district is started.
72 InnoDB: import: 1 indexes are detected.
73 InnoDB: Progress in %: 16 33 50 66 83 100 done.
74 InnoDB: import: extended import of tpcc2/history is started.
75 InnoDB: import: 3 indexes are detected.
76 InnoDB: Progress in %: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 done.
77 ...
78
79Version Specific Information
80============================
81
82 * 5.5.10-20.1:
83 Renamed variable :variable:`innodb_expand_import` to :variable:`innodb_import_table_from_xtrabackup`.
84
85System Variables
86================
87
88.. variable:: innodb_expand_import
89
90 :version 5.5.10-20.1: Renamed.
91 :cli: Yes
92 :conf: Yes
93 :scope: Global
94 :dyn: Yes
95 :vartype: ULONG
96 :default: 0
97 :range: 0-1
98
99If set to 1, ``.ibd`` file is converted (``space id``, ``index id``, etc.) with index information in ``.exp`` file during the import process (``ALTER TABLE ... IMPORT TABLESPACE`` command).
100
101 This variable was renamed to :variable:`innodb_import_table_from_xtrabackup`, beginning in release 5.5.10-20.1. It still exists as :variable:`innodb_expand_import` in versions prior to that.
102
103
104.. variable:: innodb_import_table_from_xtrabackup
105
106 :version 5.5.10-20.1: Introduced.
107 :cli: Yes
108 :conf: Yes
109 :scope: Global
110 :dyn: Yes
111 :vartype: ULONG
112 :default: 0
113 :range: 0-1
114
115If set to 1, ``.ibd`` file is converted (``space id``, ``index id``, etc.) with index information in .exp file during the import process (``ALTER TABLE ... IMPORT TABLESPACE`` command).
116
117 This variable was added in release 5.5.10-20.1. Prior to that, it was named :variable:`innodb_expand_import`, which still exists in earlier versions.
118
119
120.. Other Information
121
122
123.. TODO
124
125.. Make |XtraDB| to be enable to export .exp file by itself.
126
127.. Suggestion 2 (expand "alter table ... discard tablespace")
128.. New variable “innodb_export_at_discard = [0|1]”. When 1, |XtraDB| close the tablespace cleanly (no data in insertbuffer or to purge) and output .exp file at the same place to the .ibd file instead of deleting .ibd file only (default behavior), when “ALTER TABLE … DISCARD TABLESPACE”.
129
130.. I think The default value should be 1 for safety, because 0 deletes the table data… LOCK TABLE also may be needed before the operation (error when doesn``t have LOCK?).
131
132.. (example: move database named ``example``)
133
134.. Source: (innodb_export_at_discard should be 1)
135
136.. lock all tables in the database ``example``
137.. "ALTER TABLE ... DISCARD TABLESPACE" for all tables in ``exmple``
138.. unlock all tables in the database ``example``
139.. (and we need to get all create table clause (e.g. "mysqldump --no-data"))
140.. obtain *.ibd *.exp as exported files
141.. Target: (innodb_expand_import should be 1)
142
143.. create all tables in ``example``
144.. "ALTER TABLE ... DISCARD TABLESPACE" for all tables in ``exmple``
145.. overwrite *.ibd and put *.exp from the Target
146.. "ALTER TABLE ... IMPORT TABLESPACE" for all tables in ``exmple``
147.. I think making the shell to do the above operations automatically is much easier than implement the new SQLs to do them…
148
149.. Suggestion 1 (at shutdown [too simple... **rejected**...])
150.. New variable “innodb_export_exp_at_shutdown = [0|1]”. When 1, |XtraDB| outputs .exp files for all |InnoDB| tables at clean shutdown. (works file_per_table mode inly)
151
152.. XtraDB must treat also .exp files along with .ibd files. (e.g. delete files when delete table)
153
154
155Other reading
156=============
157
158 * `Moving InnoDB tables between servers <http://www.mysqlperformanceblog.com/2009/06/08/impossible-possible-moving-innodb-tables-between-servers/>`_
159
160 * `Copying InnoDB tables between servers <http://www.mysqlperformanceblog.com/2009/07/31/copying-innodb-tables-between-servers/>`_
1610
=== removed file 'doc/source/percona-xtradb.png'
162Binary files doc/source/percona-xtradb.png 2013-05-28 06:50:44 +0000 and doc/source/percona-xtradb.png 1970-01-01 00:00:00 +0000 differ1Binary files doc/source/percona-xtradb.png 2013-05-28 06:50:44 +0000 and doc/source/percona-xtradb.png 1970-01-01 00:00:00 +0000 differ
=== added directory 'python-for-subunit2junitxml'
=== added file 'python-for-subunit2junitxml/BytesIO.py'
--- python-for-subunit2junitxml/BytesIO.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/BytesIO.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,136 @@
1
# http://wiki.python.org/moin/BytesIO
#
# A skeleton one used for systems that don't have BytesIO.
#
# It's enough for subunit at least....

class BytesIO(object):
    """A file-like API for reading and writing bytes objects.

    Mostly like StringIO, but write() calls modify the underlying
    (mutable) bytes-like object in place.

    >>> b = bytearray()
    >>> f = BytesIO(b, 'w')
    >>> f.write(b'\\xca\\xfe\\xba\\xbe')
    >>> f.write(b'\\x57\\x41\\x56\\x45')
    >>> b == bytearray([202, 254, 186, 190, 87, 65, 86, 69])
    True
    """

    def __init__(self, buf, mode='r'):
        """Create a new BytesIO for reading or writing the given buffer.

        buf  - Back-end buffer for this BytesIO.  A bytes-like object.
               Actually, anything that supports len(), slice-assignment,
               and += will work.
        mode - One of 'r', 'w', 'a'.
               An optional 'b' is also allowed, but it doesn't do anything.
        """
        # XXX many 'mode' possibilities aren't allowed yet: 'rw+Ut'
        if len(mode) == 2 and mode[-1] == 'b':
            mode = mode[:-1]  # binary mode goes without saying
        if mode not in ('r', 'w', 'a'):
            raise ValueError("mode must be 'r', 'w', or 'a'")

        self._buf = buf
        self.mode = mode
        self.closed = False
        if self.mode == 'w':
            # Like open(..., 'w'): writing truncates the buffer.
            del buf[:]
            self._point = 0
        elif self.mode == 'r':
            self._point = 0
        else:  # 'a'
            self._point = len(buf)

    def close(self):
        self.closed = True

    def _check_closed(self):
        if self.closed:
            raise ValueError("file is closed")

    def flush(self):
        self._check_closed()

    def next(self):
        line = self.readline()
        if len(line) == 0:
            raise StopIteration
        return line

    def read(self, size=None):
        """Read up to *size* bytes from the current position (all if None)."""
        self._check_closed()
        if size is None:
            e = len(self._buf)
        else:
            e = min(self._point + size, len(self._buf))
        r = self._buf[self._point:e]
        self._point = e
        return r

    def readline(self, size=None):
        self._check_closed()
        # XXX TODO - assume ascii and read a line.
        # Fixed: the original skeleton crashed with a NameError on the bare
        # name 'die'; raise an explicit, meaningful exception instead.
        raise NotImplementedError("BytesIO.readline is not implemented")

    def readlines(self, sizehint=None):
        # XXX TODO handle sizehint
        return list(self)

    def seek(self, offset, whence=0):
        """Move the file position; whence is 0 (start), 1 (cur), or 2 (end)."""
        self._check_closed()

        if whence == 0:
            self._point = offset
        elif whence == 1:
            self._point += offset
        elif whence == 2:
            self._point = len(self._buf) + offset
        else:
            raise ValueError("whence must be 0, 1, or 2")

        if self._point < 0:
            self._point = 0  # XXX is this right?

    def tell(self):
        self._check_closed()
        return self._point

    def truncate(self, size=None):
        """Drop everything after *size* (default: the current position)."""
        self._check_closed()
        if size is None:
            size = self.tell()
        del self._buf[size:]

    def write(self, data):
        """Write *data* at the current position, zero-padding any gap."""
        self._check_closed()
        amt = len(data)
        size = len(self._buf)
        if self.mode == 'a':
            self._point = size

        if self._point > size:
            # Fixed: the original tested the undefined name 'b' here
            # (``isinstance(b, bytes)``), so any write past the end of the
            # buffer raised NameError.  Inspect the actual buffer instead.
            if isinstance(self._buf, (bytes, bytearray)):
                blank = b'\x00'
            else:
                # Don't know what default value to insert, unfortunately
                raise ValueError("can't write past the end of this object")
            self._buf += blank * (self._point - size) + data
            self._point = len(self._buf)
        else:
            p = self._point
            self._buf[p:p + amt] = data
            self._point = min(p + amt, len(self._buf))

    def writelines(self, seq):
        for line in seq:
            self.write(line)

    def __iter__(self):
        return self

    @property
    def name(self):
        return repr(self)
0137
=== added directory 'python-for-subunit2junitxml/iso8601'
=== added file 'python-for-subunit2junitxml/iso8601/LICENSE'
--- python-for-subunit2junitxml/iso8601/LICENSE 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/iso8601/LICENSE 2013-05-28 06:50:46 +0000
@@ -0,0 +1,20 @@
1Copyright (c) 2007 Michael Twomey
2
3Permission is hereby granted, free of charge, to any person obtaining a
4copy of this software and associated documentation files (the
5"Software"), to deal in the Software without restriction, including
6without limitation the rights to use, copy, modify, merge, publish,
7distribute, sublicense, and/or sell copies of the Software, and to
8permit persons to whom the Software is furnished to do so, subject to
9the following conditions:
10
11The above copyright notice and this permission notice shall be included
12in all copies or substantial portions of the Software.
13
14THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
17IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
18CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
19TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
20SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
021
=== added file 'python-for-subunit2junitxml/iso8601/README'
--- python-for-subunit2junitxml/iso8601/README 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/iso8601/README 2013-05-28 06:50:46 +0000
@@ -0,0 +1,26 @@
1A simple package to deal with ISO 8601 date time formats.
2
3ISO 8601 defines a neutral, unambiguous date string format, which also
4has the property of sorting naturally.
5
6e.g. YYYY-MM-DDTHH:MM:SSZ or 2007-01-25T12:00:00Z
7
8Currently this covers only the most common date formats encountered, not
9all of ISO 8601 is handled.
10
11Currently the following formats are handled:
12
13* 2006-01-01T00:00:00Z
14* 2006-01-01T00:00:00[+-]00:00
15
16I'll add more as I encounter them in my day to day life. Patches with
17new formats and tests will be gratefully accepted of course :)
18
19References:
20
21* http://www.cl.cam.ac.uk/~mgk25/iso-time.html - simple overview
22
23* http://hydracen.com/dx/iso8601.htm - more detailed enumeration of
24 valid formats.
25
26See the LICENSE file for the license this package is released under.
027
=== added file 'python-for-subunit2junitxml/iso8601/README.subunit'
--- python-for-subunit2junitxml/iso8601/README.subunit 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/iso8601/README.subunit 2013-05-28 06:50:46 +0000
@@ -0,0 +1,5 @@
1This is a [slightly rearranged] import of http://pypi.python.org/pypi/iso8601/
2version 0.1.4. The OS X hidden files have been stripped, and the package
3turned into a single module, to simplify installation. The remainder of the
4source distribution is included in the subunit source tree at python/iso8601
5for reference.
06
=== added file 'python-for-subunit2junitxml/iso8601/setup.py'
--- python-for-subunit2junitxml/iso8601/setup.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/iso8601/setup.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,58 @@
try:
    from setuptools import setup
except ImportError:
    # Fixed: distutils exposes setup() in distutils.core, not at the package
    # top level -- ``from distutils import setup`` raises ImportError itself,
    # defeating the whole purpose of this fallback.
    from distutils.core import setup

long_description="""Simple module to parse ISO 8601 dates

This module parses the most common forms of ISO 8601 date strings (e.g.
2007-01-14T20:34:22+00:00) into datetime objects.

>>> import iso8601
>>> iso8601.parse_date("2007-01-25T12:00:00Z")
datetime.datetime(2007, 1, 25, 12, 0, tzinfo=<iso8601.iso8601.Utc ...>)
>>>

Changes
=======

0.1.4
-----

* The default_timezone argument wasn't being passed through correctly,
  UTC was being used in every case. Fixes issue 10.

0.1.3
-----

* Fixed the microsecond handling, the generated microsecond values were
  way too small. Fixes issue 9.

0.1.2
-----

* Adding ParseError to __all__ in iso8601 module, allows people to import it.
  Addresses issue 7.
* Be a little more flexible when dealing with dates without leading zeroes.
  This violates the spec a little, but handles more dates as seen in the
  field. Addresses issue 6.
* Allow date/time separators other than T.

0.1.1
-----

* When parsing dates without a timezone the specified default is used. If no
  default is specified then UTC is used. Addresses issue 4.
"""

# The short description is the first line of the changelog text above.
setup(
    name="iso8601",
    version="0.1.4",
    description=long_description.split("\n")[0],
    long_description=long_description,
    author="Michael Twomey",
    author_email="micktwomey+iso8601@gmail.com",
    url="http://code.google.com/p/pyiso8601/",
    packages=["iso8601"],
    license="MIT",
)
059
=== added file 'python-for-subunit2junitxml/iso8601/test_iso8601.py'
--- python-for-subunit2junitxml/iso8601/test_iso8601.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/iso8601/test_iso8601.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,111 @@
import iso8601

def test_iso8601_regex():
    # The main regex recognises a basic Zulu timestamp.
    assert iso8601.ISO8601_REGEX.match("2006-10-11T00:14:33Z")

def test_timezone_regex():
    # All four offset spellings the timezone regex must accept.
    for zone in ("+01:00", "+00:00", "+01:20", "-01:00"):
        assert iso8601.TIMEZONE_REGEX.match(zone)

def test_parse_date():
    d = iso8601.parse_date("2006-10-20T15:34:56Z")
    assert (d.year, d.month, d.day) == (2006, 10, 20)
    assert (d.hour, d.minute, d.second) == (15, 34, 56)
    assert d.tzinfo == iso8601.UTC

def test_parse_date_fraction():
    d = iso8601.parse_date("2006-10-20T15:34:56.123Z")
    assert (d.year, d.month, d.day) == (2006, 10, 20)
    assert (d.hour, d.minute, d.second) == (15, 34, 56)
    assert d.microsecond == 123000
    assert d.tzinfo == iso8601.UTC

def test_parse_date_fraction_2():
    """From bug 6

    """
    # The stray trailing quote in the input string is part of the bug report.
    d = iso8601.parse_date("2007-5-7T11:43:55.328Z'")
    assert (d.year, d.month, d.day) == (2007, 5, 7)
    assert (d.hour, d.minute, d.second) == (11, 43, 55)
    assert d.microsecond == 328000
    assert d.tzinfo == iso8601.UTC

def test_parse_date_tz():
    d = iso8601.parse_date("2006-10-20T15:34:56.123+02:30")
    assert (d.year, d.month, d.day) == (2006, 10, 20)
    assert (d.hour, d.minute, d.second) == (15, 34, 56)
    assert d.microsecond == 123000
    assert d.tzinfo.tzname(None) == "+02:30"
    offset = d.tzinfo.utcoffset(None)
    assert (offset.days, offset.seconds) == (0, 60 * 60 * 2.5)

def test_parse_invalid_date():
    raised = False
    try:
        iso8601.parse_date(None)
    except iso8601.ParseError:
        raised = True
    assert raised

def test_parse_invalid_date2():
    raised = False
    try:
        iso8601.parse_date("23")
    except iso8601.ParseError:
        raised = True
    assert raised

def test_parse_no_timezone():
    """issue 4 - Handle datetime string without timezone

    This tests what happens when you parse a date with no timezone. While not
    strictly correct this is quite common. I'll assume UTC for the time zone
    in this case.
    """
    d = iso8601.parse_date("2007-01-01T08:00:00")
    assert (d.year, d.month, d.day) == (2007, 1, 1)
    assert (d.hour, d.minute, d.second) == (8, 0, 0)
    assert d.microsecond == 0
    assert d.tzinfo == iso8601.UTC

def test_parse_no_timezone_different_default():
    tz = iso8601.FixedOffset(2, 0, "test offset")
    d = iso8601.parse_date("2007-01-01T08:00:00", default_timezone=tz)
    assert d.tzinfo == tz

def test_space_separator():
    """Handle a separator other than T

    """
    d = iso8601.parse_date("2007-06-23 06:40:34.00Z")
    assert (d.year, d.month, d.day) == (2007, 6, 23)
    assert (d.hour, d.minute, d.second) == (6, 40, 34)
    assert d.microsecond == 0
    assert d.tzinfo == iso8601.UTC
0112
=== added directory 'python-for-subunit2junitxml/junitxml'
=== added file 'python-for-subunit2junitxml/junitxml/__init__.py'
--- python-for-subunit2junitxml/junitxml/__init__.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/junitxml/__init__.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,221 @@
1#
2# junitxml: extensions to Python unittest to get output junitxml
3# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
4#
5# Copying permitted under the LGPL-3 licence, included with this library.
6
7"""unittest compatible JUnit XML output."""
8
9
10import datetime
11import re
12import time
13import unittest
14
15# same format as sys.version_info: "A tuple containing the five components of
16# the version number: major, minor, micro, releaselevel, and serial. All
17# values except releaselevel are integers; the release level is 'alpha',
18# 'beta', 'candidate', or 'final'. The version_info value corresponding to the
19# Python version 2.0 is (2, 0, 0, 'final', 0)." Additionally we use a
20# releaselevel of 'dev' for unreleased under-development code.
21#
22# If the releaselevel is 'alpha' then the major/minor/micro components are not
23# established at this point, and setup.py will use a version of next-$(revno).
24# If the releaselevel is 'final', then the tarball will be major.minor.micro.
25# Otherwise it is major.minor.micro~$(revno).
26__version__ = (0, 7, 0, 'alpha', 0)
27
28
def test_suite():
    """Return the full junitxml test suite (import deferred to call time)."""
    from junitxml import tests as _tests
    return _tests.test_suite()
32
33
class LocalTimezone(datetime.tzinfo):
    """tzinfo reporting the machine's local offset from UTC (no DST)."""

    def __init__(self):
        # Offset is computed lazily on first use and then cached.
        self._offset = None

    # It seems that the minimal possible implementation is to just return
    # None from every method, but then it breaks...
    def utcoffset(self, dt):
        if self._offset is None:
            # Arbitrary fixed timestamp; doesn't handle DST very well.
            stamp = 1260423030
            local = datetime.datetime.fromtimestamp(stamp)
            utc = datetime.datetime.utcfromtimestamp(stamp)
            self._offset = local - utc
        return self._offset

    def dst(self, dt):
        return datetime.timedelta(0)

    def tzname(self, dt):
        return None
53
54
55def _error_name(eclass):
56 module = eclass.__module__
57 if module not in ("__main__", "builtins", "exceptions"):
58 return ".".join([module, eclass.__name__])
59 return eclass.__name__
60
61
# Characters forbidden in XML character data: C0 controls (minus tab, LF,
# CR), lone surrogates, and the non-characters U+FFFE/U+FFFF.
_non_cdata = "[\0-\b\x0B-\x1F\uD800-\uDFFF\uFFFE\uFFFF]+"
if "\\u" in _non_cdata:
    # Python 2 path: in a byte-string literal the \u escapes above stay
    # verbatim, so the literal "\u" is present; decode them and work in
    # unicode, re-encoding to UTF-8 on the way out.
    _non_cdata = _non_cdata.decode("unicode-escape")
    def _strip_invalid_chars(s, _sub=re.compile(_non_cdata, re.UNICODE).sub):
        if not isinstance(s, unicode):
            try:
                s = s.decode("utf-8")
            except UnicodeDecodeError:
                s = s.decode("ascii", "replace")
        return _sub("", s).encode("utf-8")
else:
    # Python 3 path: str literals already resolved the \u escapes, so the
    # pattern is usable directly.
    def _strip_invalid_chars(s, _sub=re.compile(_non_cdata, re.UNICODE).sub):
        return _sub("", s)
def _escape_content(s):
    # Escape markup-significant text for use as element content.
    return (_strip_invalid_chars(s)
        .replace("&", "&amp;")
        .replace("<", "&lt;")
        .replace("]]>", "]]&gt;"))
def _escape_attr(s):
    # As _escape_content, plus quote and whitespace escaping so the value
    # survives inside a double-quoted XML attribute.
    return (_strip_invalid_chars(s)
        .replace("&", "&amp;")
        .replace("<", "&lt;")
        .replace("]]>", "]]&gt;")
        .replace('"', "&quot;")
        .replace("\t", "&#x9;")
        .replace("\n", "&#xA;"))
88
89
class JUnitXmlResult(unittest.TestResult):
    """A TestResult which outputs JUnit compatible XML."""

    def __init__(self, stream):
        """Create a JUnitXmlResult.

        :param stream: A stream to write results to. Note that due to the
            nature of JUnit XML output, nothing will be written to the stream
            until stopTestRun() is called.
        """
        self.__super = super(JUnitXmlResult, self)
        self.__super.__init__()
        # GZ 2010-09-03: We have a problem if passed a text stream in Python 3
        #                as really we want to write raw UTF-8 to ensure that
        #                the encoding is not mangled later
        self._stream = stream
        self._results = []       # accumulated <testcase> XML fragments
        self._set_time = None    # clock explicitly set via time(), if any
        self._test_start = None  # start timestamp of the current test
        self._run_start = None   # start timestamp of the whole run
        self._tz_info = None     # lazily created LocalTimezone

    def startTestRun(self):
        """Start a test run."""
        self._run_start = self._now()

    def _get_tzinfo(self):
        # Lazily create and cache the local timezone object.
        if self._tz_info is None:
            self._tz_info = LocalTimezone()
        return self._tz_info

    def _now(self):
        # Prefer an explicitly supplied time (e.g. from a subunit stream)
        # over the wall clock.
        if self._set_time is not None:
            return self._set_time
        else:
            return datetime.datetime.now(self._get_tzinfo())

    def time(self, a_datetime):
        self._set_time = a_datetime
        # Clamp the run start backwards so durations never go negative when
        # stream timestamps predate startTestRun().
        if (self._run_start is not None and
            self._run_start > a_datetime):
            self._run_start = a_datetime

    def startTest(self, test):
        self.__super.startTest(test)
        self._test_start = self._now()

    def _duration(self, from_datetime):
        # Seconds elapsed since from_datetime, as a float.  Mixing naive and
        # aware datetimes raises TypeError; fall back to a -1 day sentinel.
        try:
            delta = self._now() - from_datetime
        except TypeError:
            n = self._now()  # NOTE(review): unused; presumably a debug leftover
            delta = datetime.timedelta(-1)
        seconds = delta.days * 3600*24 + delta.seconds
        return seconds + 0.000001 * delta.microseconds

    def _test_case_string(self, test):
        # Append the *opening* of a <testcase> element; callers finish it
        # with either '/>' (success) or '>' plus children and a closing tag.
        duration = self._duration(self._test_start)
        test_id = test.id()
        # Split on the last dot not inside a parameter
        class_end = test_id.rfind(".", 0, test_id.find("("))
        if class_end == -1:
            classname, name = "", test_id
        else:
            classname, name = test_id[:class_end], test_id[class_end+1:]
        self._results.append('<testcase classname="%s" name="%s" '
            'time="%0.3f"' % (_escape_attr(classname), _escape_attr(name), duration))

    def stopTestRun(self):
        """Stop a test run.

        This allows JUnitXmlResult to output the XML representation of the test
        run.
        """
        duration = self._duration(self._run_start)
        # JUnit wants the summary counts up front; unexpected successes count
        # as failures where the running Python supports them.
        self._stream.write('<testsuite errors="%d" failures="%d" name="" '
            'tests="%d" time="%0.3f">\n' % (len(self.errors),
            len(self.failures) + len(getattr(self, "unexpectedSuccesses", ())),
            self.testsRun, duration))
        self._stream.write(''.join(self._results))
        self._stream.write('</testsuite>\n')

    def addError(self, test, error):
        self.__super.addError(test, error)
        self._test_case_string(test)
        self._results.append('>\n')
        self._results.append('<error type="%s">%s</error>\n</testcase>\n' % (
            _escape_attr(_error_name(error[0])),
            _escape_content(self._exc_info_to_string(error, test))))

    def addFailure(self, test, error):
        self.__super.addFailure(test, error)
        self._test_case_string(test)
        self._results.append('>\n')
        self._results.append('<failure type="%s">%s</failure>\n</testcase>\n' %
            (_escape_attr(_error_name(error[0])),
            _escape_content(self._exc_info_to_string(error, test))))

    def addSuccess(self, test):
        self.__super.addSuccess(test)
        self._test_case_string(test)
        self._results.append('/>\n')

    def addSkip(self, test, reason):
        try:
            self.__super.addSkip(test, reason)
        except AttributeError:
            # Python < 2.7|3.1
            pass
        self._test_case_string(test)
        self._results.append('>\n')
        self._results.append('<skip>%s</skip>\n</testcase>\n'% _escape_attr(reason))

    def addUnexpectedSuccess(self, test):
        try:
            self.__super.addUnexpectedSuccess(test)
        except AttributeError:
            # Python < 2.7|3.1
            pass
        self._test_case_string(test)
        self._results.append('>\n')
        self._results.append('<failure type="unittest.case._UnexpectedSuccess"/>\n</testcase>\n')

    def addExpectedFailure(self, test, error):
        try:
            self.__super.addExpectedFailure(test, error)
        except AttributeError:
            # Python < 2.7|3.1
            pass
        self._test_case_string(test)
        # An expected failure is reported like a plain success.
        self._results.append('/>\n')
221
0222
=== added directory 'python-for-subunit2junitxml/junitxml/tests'
=== added file 'python-for-subunit2junitxml/junitxml/tests/__init__.py'
--- python-for-subunit2junitxml/junitxml/tests/__init__.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/junitxml/tests/__init__.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,16 @@
1#
2# junitxml: extensions to Python unittest to get output junitxml
3# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
4#
5# Copying permitted under the LGPL-3 licence, included with this library.
6
7import unittest
8
9from junitxml.tests import (
10 test_junitxml,
11 )
12
def test_suite():
    """Assemble a suite covering every test module in junitxml.tests."""
    loader = unittest.TestLoader()
    names = ['junitxml.tests.test_junitxml']
    return loader.loadTestsFromNames(names)
017
=== added file 'python-for-subunit2junitxml/junitxml/tests/test_junitxml.py'
--- python-for-subunit2junitxml/junitxml/tests/test_junitxml.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/junitxml/tests/test_junitxml.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,327 @@
1#
2# junitxml: extensions to Python unittest to get output junitxml
3# Copyright (C) 2009 Robert Collins <robertc@robertcollins.net>
4#
5# Copying permitted under the LGPL-3 licence, included with this library.
6
7
8try:
9 from cStringIO import StringIO
10except ImportError:
11 from io import StringIO
12import datetime
13import re
14import sys
15import unittest
16import xml.dom.minidom
17
18import junitxml
19
class TestImports(unittest.TestCase):
    """Smoke test: the public result class is importable."""

    def test_result(self):
        # Import inside the test body so a broken package shows up as a
        # test error rather than a collection-time crash.
        from junitxml import JUnitXmlResult
24
25
class TestJUnitXmlResult__init__(unittest.TestCase):
    """Construction smoke test for JUnitXmlResult."""

    def test_with_stream(self):
        # Any writable file-like object is an acceptable output stream.
        result = junitxml.JUnitXmlResult(StringIO())
30
31
class TestJUnitXmlResult(unittest.TestCase):
    """Behavioural tests for the XML emitted by JUnitXmlResult."""

    def setUp(self):
        self.output = StringIO()
        self.result = junitxml.JUnitXmlResult(self.output)

    def get_output(self):
        # Normalise the captured XML: fixed time values, and failure/error
        # bodies collapsed to the literal words "failure"/"error".
        output = self.output.getvalue()
        # Collapse detailed regions into specific strings we can match on
        return re.sub(r'(?s)<failure (.*?)>.*?</failure>',
            r'<failure \1>failure</failure>', re.sub(
            r'(?s)<error (.*?)>.*?</error>', r'<error \1>error</error>',
            re.sub(r'time="\d+\.\d+"', 'time="0.000"', output)))

    def run_test_or_simulate(self, test, method_name, manual_method,
        *manual_args):
        # Run the test normally when the TestCase supports method_name;
        # otherwise drive the result object by hand (pre-2.7 Pythons).
        if getattr(test, method_name, None):
            test.run(self.result)
        else:
            # older python - manually execute
            self.result.startTest(test)
            manual_method(test, *manual_args)
            self.result.stopTest(test)

    def test_run_duration_handles_datestamping_in_the_past(self):
        # When used via subunit2junitxml, startTestRun is called before
        # any tz info in the test stream has been seen.
        # So, we use the earliest reported timestamp as the start time,
        # replacing _test_start if needed.
        self.result.startTestRun() # the time is now.
        # Lose an hour (peeks inside, a little naughty but not very).
        self.result.time(self.result._run_start - datetime.timedelta(0, 3600))
        self.result.stopTestRun()
        self.assertEqual("""<testsuite errors="0" failures="0" name="" tests="0" time="0.000">
</testsuite>
""", self.get_output())

    def test_startTestRun_no_output(self):
        # startTestRun doesn't output anything, because JUnit wants an up-front
        # summary.
        self.result.startTestRun()
        self.assertEqual('', self.get_output())

    def test_stopTestRun_outputs(self):
        # When stopTestRun is called, everything is output.
        self.result.startTestRun()
        self.result.stopTestRun()
        self.assertEqual("""<testsuite errors="0" failures="0" name="" tests="0" time="0.000">
</testsuite>
""", self.get_output())

    def test_test_count(self):
        class Passes(unittest.TestCase):
            def test_me(self):
                pass
        self.result.startTestRun()
        Passes("test_me").run(self.result)
        Passes("test_me").run(self.result)
        self.result.stopTestRun()
        # When tests are run, the number of tests is counted.
        output = self.get_output()
        self.assertTrue('tests="2"' in output)

    def test_test_id_with_parameter(self):
        # A parenthesised parameter in the id must stay with the test name,
        # not be split into the classname.
        class Passes(unittest.TestCase):
            def id(self):
                return unittest.TestCase.id(self) + '(version_1.6)'
            def test_me(self):
                pass
        self.result.startTestRun()
        Passes("test_me").run(self.result)
        self.result.stopTestRun()
        output = self.get_output()
        self.assertTrue('Passes" name="test_me(version_1.6)"' in output)

    def test_erroring_test(self):
        class Errors(unittest.TestCase):
            def test_me(self):
                1/0
        self.result.startTestRun()
        Errors("test_me").run(self.result)
        self.result.stopTestRun()
        self.assertEqual("""<testsuite errors="1" failures="0" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.Errors" name="test_me" time="0.000">
<error type="ZeroDivisionError">error</error>
</testcase>
</testsuite>
""", self.get_output())

    def test_failing_test(self):
        class Fails(unittest.TestCase):
            def test_me(self):
                self.fail()
        self.result.startTestRun()
        Fails("test_me").run(self.result)
        self.result.stopTestRun()
        self.assertEqual("""<testsuite errors="0" failures="1" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.Fails" name="test_me" time="0.000">
<failure type="AssertionError">failure</failure>
</testcase>
</testsuite>
""", self.get_output())

    def test_successful_test(self):
        class Passes(unittest.TestCase):
            def test_me(self):
                pass
        self.result.startTestRun()
        Passes("test_me").run(self.result)
        self.result.stopTestRun()
        self.assertEqual("""<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.Passes" name="test_me" time="0.000"/>
</testsuite>
""", self.get_output())

    def test_skip_test(self):
        class Skips(unittest.TestCase):
            def test_me(self):
                self.skipTest("yo")
        self.result.startTestRun()
        test = Skips("test_me")
        self.run_test_or_simulate(test, 'skipTest', self.result.addSkip, 'yo')
        self.result.stopTestRun()
        output = self.get_output()
        expected = """<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.Skips" name="test_me" time="0.000">
<skip>yo</skip>
</testcase>
</testsuite>
"""
        self.assertEqual(expected, output)

    def test_unexpected_success_test(self):
        class Succeeds(unittest.TestCase):
            def test_me(self):
                pass
            try:
                test_me = unittest.expectedFailure(test_me)
            except AttributeError:
                pass # Older python - just let the test pass
        self.result.startTestRun()
        Succeeds("test_me").run(self.result)
        self.result.stopTestRun()
        output = self.get_output()
        # On modern Pythons the unexpected success is reported as a failure;
        # older Pythons (no expectedFailure) just see a pass.
        expected = """<testsuite errors="0" failures="1" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.Succeeds" name="test_me" time="0.000">
<failure type="unittest.case._UnexpectedSuccess"/>
</testcase>
</testsuite>
"""
        expected_old = """<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.Succeeds" name="test_me" time="0.000"/>
</testsuite>
"""
        if output != expected_old:
            self.assertEqual(expected, output)

    def test_expected_failure_test(self):
        # Mutable cell so the class body below can record whether this
        # Python supports unittest.expectedFailure.
        expected_failure_support = [True]
        class ExpectedFail(unittest.TestCase):
            def test_me(self):
                self.fail("fail")
            try:
                test_me = unittest.expectedFailure(test_me)
            except AttributeError:
                # Older python - just let the test fail
                expected_failure_support[0] = False
        self.result.startTestRun()
        ExpectedFail("test_me").run(self.result)
        self.result.stopTestRun()
        output = self.get_output()
        expected = """<testsuite errors="0" failures="0" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.ExpectedFail" name="test_me" time="0.000"/>
</testsuite>
"""
        expected_old = """<testsuite errors="0" failures="1" name="" tests="1" time="0.000">
<testcase classname="junitxml.tests.test_junitxml.ExpectedFail" name="test_me" time="0.000">
<failure type="AssertionError">failure</failure>
</testcase>
</testsuite>
"""
        if expected_failure_support[0]:
            self.assertEqual(expected, output)
        else:
            self.assertEqual(expected_old, output)
217
218
class TestWellFormedXml(unittest.TestCase):
    """XML created should always be well formed even with odd test cases"""

    def _run_and_parse_test(self, case):
        # Run *case* through a fresh result and re-parse the emitted XML;
        # parseString raising would itself fail the test.
        output = StringIO()
        result = junitxml.JUnitXmlResult(output)
        result.startTestRun()
        case.run(result)
        result.stopTestRun()
        return xml.dom.minidom.parseString(output.getvalue())

    def test_failure_with_amp(self):
        """Check the failure element content is escaped"""
        class FailWithAmp(unittest.TestCase):
            def runTest(self):
                self.fail("& should be escaped as &amp;")
        doc = self._run_and_parse_test(FailWithAmp())
        self.assertTrue(
            doc.getElementsByTagName("failure")[0].firstChild.nodeValue
            .endswith("AssertionError: & should be escaped as &amp;\n"))

    def test_quotes_in_test_case_id(self):
        """Check that quotes in an attribute are escaped"""
        class QuoteId(unittest.TestCase):
            def id(self):
                return unittest.TestCase.id(self) + '("quotes")'
            def runTest(self):
                pass
        doc = self._run_and_parse_test(QuoteId())
        self.assertEqual('runTest("quotes")',
            doc.getElementsByTagName("testcase")[0].getAttribute("name"))

    def test_skip_reason(self):
        """Check the skip element content is escaped"""
        class SkipWithLt(unittest.TestCase):
            def runTest(self):
                self.fail("version < 2.7")
            # The class body can read the enclosing method's locals, so
            # ``self`` here is the outer TestWellFormedXml instance; the
            # try/except records whether unittest.skip exists.
            try:
                runTest = unittest.skip("2.7 <= version")(runTest)
            except AttributeError:
                self.has_skip = False
            else:
                self.has_skip = True
        doc = self._run_and_parse_test(SkipWithLt())
        if self.has_skip:
            self.assertEqual('2.7 <= version',
                doc.getElementsByTagName("skip")[0].firstChild.nodeValue)
        else:
            self.assertTrue(
                doc.getElementsByTagName("failure")[0].firstChild.nodeValue
                .endswith("AssertionError: version < 2.7\n"))

    def test_error_with_control_characters(self):
        """Check C0 control characters are stripped rather than output"""
        class ErrorWithC0(unittest.TestCase):
            def runTest(self):
                raise ValueError("\x1F\x0E\x0C\x0B\x08\x01\x00lost control")
        doc = self._run_and_parse_test(ErrorWithC0())
        self.assertTrue(
            doc.getElementsByTagName("error")[0].firstChild.nodeValue
            .endswith("ValueError: lost control\n"))

    def test_error_with_invalid_cdata(self):
        """Check unicode outside the valid cdata range is stripped"""
        # len("\uffff") == 1 distinguishes Python 3 (resolved escape) from
        # Python 2 byte strings (where it is the 6-char literal).
        if len("\uffff") == 1:
            # Basic str type supports unicode
            exception = ValueError("\ufffe\uffffEOF")
        else:
            class UTF8_Error(Exception):
                def __unicode__(self):
                    return str(self).decode("UTF-8")
            exception = UTF8_Error("\xef\xbf\xbe\xef\xbf\xbfEOF")
        class ErrorWithBadUnicode(unittest.TestCase):
            def runTest(self):
                raise exception
        doc = self._run_and_parse_test(ErrorWithBadUnicode())
        self.assertTrue(
            doc.getElementsByTagName("error")[0].firstChild.nodeValue
            .endswith("Error: EOF\n"))

    def test_error_with_surrogates(self):
        """Check unicode surrogates are handled properly, paired or otherwise

        This is a pain due to suboptimal unicode support in Python and the
        various changes in Python 3. On UCS-2 builds there is no easy way of
        getting rid of unpaired surrogates while leaving valid pairs alone, so
        this test doesn't require astral characters are kept there.
        """
        if len("\uffff") == 1:
            exception = ValueError("paired: \U000201a2"
                " unpaired: "+chr(0xD800)+"-"+chr(0xDFFF))
            astral_char = "\U000201a2"
        else:
            class UTF8_Error(Exception):
                def __unicode__(self):
                    return str(self).decode("UTF-8")
            exception = UTF8_Error("paired: \xf0\xa0\x86\xa2"
                " unpaired: \xed\xa0\x80-\xed\xbf\xbf")
            astral_char = "\U000201a2".decode("unicode-escape")
        class ErrorWithSurrogates(unittest.TestCase):
            def runTest(self):
                raise exception
        doc = self._run_and_parse_test(ErrorWithSurrogates())
        traceback = doc.getElementsByTagName("error")[0].firstChild.nodeValue
        if sys.maxunicode == 0xFFFF:
            pass # would be nice to handle astral characters properly even so
        else:
            self.assertTrue(astral_char in traceback)
        self.assertTrue(traceback.endswith(" unpaired: -\n"))
0328
=== added directory 'python-for-subunit2junitxml/subunit'
=== added file 'python-for-subunit2junitxml/subunit/__init__.py'
--- python-for-subunit2junitxml/subunit/__init__.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/subunit/__init__.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,1250 @@
1#
2# subunit: extensions to Python unittest to get test results from subprocesses.
3# Copyright (C) 2005 Robert Collins <robertc@robertcollins.net>
4#
5# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
6# license at the users choice. A copy of both licenses are available in the
7# project source as Apache-2.0 and BSD. You may not use this file except in
8# compliance with one of these two licences.
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
12# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13# license you chose for the specific language governing permissions and
14# limitations under that license.
15#
16
17"""Subunit - a streaming test protocol
18
19Overview
20++++++++
21
22The ``subunit`` Python package provides a number of ``unittest`` extensions
23which can be used to cause tests to output Subunit, to parse Subunit streams
24into test activity, perform seamless test isolation within a regular test
25case and variously sort, filter and report on test runs.
26
27
28Key Classes
29-----------
30
31The ``subunit.TestProtocolClient`` class is a ``unittest.TestResult``
32extension which will translate a test run into a Subunit stream.
33
34The ``subunit.ProtocolTestCase`` class is an adapter between the Subunit wire
35protocol and the ``unittest.TestCase`` object protocol. It is used to translate
36a stream into a test run, which regular ``unittest.TestResult`` objects can
37process and report/inspect.
38
39Subunit has support for non-blocking usage too, for use with asyncore or
40Twisted. See the ``TestProtocolServer`` parser class for more details.
41
42Subunit includes extensions to the Python ``TestResult`` protocol. These are
43all done in a compatible manner: ``TestResult`` objects that do not implement
44the extension methods will not cause errors to be raised, instead the extension
45will either lose fidelity (for instance, folding expected failures to success
46in Python versions < 2.7 or 3.1), or discard the extended data (for extra
47details, tags, timestamping and progress markers).
48
49The test outcome methods ``addSuccess``, ``addError``, ``addExpectedFailure``,
50``addFailure``, ``addSkip`` take an optional keyword parameter ``details``
51which can be used instead of the usual python unittest parameter.
52When used the value of details should be a dict from ``string`` to
53``testtools.content.Content`` objects. This is a draft API being worked on with
54the Python Testing In Python mail list, with the goal of permitting a common
55way to provide additional data beyond a traceback, such as captured data from
56disk, logging messages etc. The reference for this API is in testtools (0.9.0
57and newer).
58
59The ``tags(new_tags, gone_tags)`` method is called (if present) to add or
60remove tags in the test run that is currently executing. If called when no
61test is in progress (that is, if called outside of the ``startTest``,
``stopTest`` pair), the tags apply to all subsequent tests. If called
63when a test is in progress, then the tags only apply to that test.
64
65The ``time(a_datetime)`` method is called (if present) when a ``time:``
66directive is encountered in a Subunit stream. This is used to tell a TestResult
about the time that events in the stream occurred at, to allow reconstructing
68test timing from a stream.
69
70The ``progress(offset, whence)`` method controls progress data for a stream.
71The offset parameter is an int, and whence is one of subunit.PROGRESS_CUR,
72subunit.PROGRESS_SET, PROGRESS_PUSH, PROGRESS_POP. Push and pop operations
73ignore the offset parameter.
74
75
76Python test support
77-------------------
78
79``subunit.run`` is a convenience wrapper to run a Python test suite via
80the command line, reporting via Subunit::
81
82 $ python -m subunit.run mylib.tests.test_suite
83
84The ``IsolatedTestSuite`` class is a TestSuite that forks before running its
85tests, allowing isolation between the test runner and some tests.
86
87Similarly, ``IsolatedTestCase`` is a base class which can be subclassed to get
88tests that will fork() before that individual test is run.
89
``ExecTestCase`` is a convenience wrapper for running an external
91program to get a Subunit stream and then report that back to an arbitrary
92result object::
93
94 class AggregateTests(subunit.ExecTestCase):
95
96 def test_script_one(self):
97 './bin/script_one'
98
99 def test_script_two(self):
100 './bin/script_two'
101
    # Normally your test loading would take care of this automatically,
103 # It is only spelt out in detail here for clarity.
104 suite = unittest.TestSuite([AggregateTests("test_script_one"),
105 AggregateTests("test_script_two")])
106 # Create any TestResult class you like.
107 result = unittest._TextTestResult(sys.stdout)
108 # And run your suite as normal, Subunit will exec each external script as
109 # needed and report to your result object.
110 suite.run(result)
111
112Utility modules
113---------------
114
115* subunit.chunked contains HTTP chunked encoding/decoding logic.
116* subunit.test_results contains TestResult helper classes.
117"""
118
119import os
120import re
121import subprocess
122import sys
123import unittest
124
125from testtools import content, content_type, ExtendedToOriginalDecorator
126from testtools.compat import _b, _u, BytesIO, StringIO
127try:
128 from testtools.testresult.real import _StringException
129 RemoteException = _StringException
130 # For testing: different pythons have different str() implementations.
131 if sys.version_info > (3, 0):
132 _remote_exception_str = "testtools.testresult.real._StringException"
133 _remote_exception_str_chunked = "34\r\n" + _remote_exception_str
134 else:
135 _remote_exception_str = "_StringException"
136 _remote_exception_str_chunked = "1A\r\n" + _remote_exception_str
137except ImportError:
138 raise ImportError ("testtools.testresult.real does not contain "
139 "_StringException, check your version.")
140from testtools import testresult
141
142from subunit import chunked, details, iso8601, test_results
143
144
# Whence values for the ``progress(offset, whence)`` extension (see the
# module docstring): push/pop ignore the offset.
PROGRESS_SET = 0    # offset is the absolute remaining-test count
PROGRESS_CUR = 1    # offset adjusts the remaining count relatively
PROGRESS_PUSH = 2   # enter a nested progress scope
PROGRESS_POP = 3    # leave a nested progress scope
149
150
def test_suite():
    """Return the test suite for the subunit package itself."""
    # Imported lazily so importing ``subunit`` does not pull in the tests.
    import subunit.tests as subunit_tests
    return subunit_tests.test_suite()
154
155
def join_dir(base_path, path):
    """
    Returns an absolute path to C{path}, calculated relative to the parent
    of C{base_path}.

    @param base_path: A path to a file or directory.
    @param path: An absolute path, or a path relative to the containing
        directory of C{base_path}.

    @return: An absolute path to C{path}.
    """
    containing_dir = os.path.dirname(os.path.abspath(base_path))
    return os.path.join(containing_dir, path)
168
169
def tags_to_new_gone(tags):
    """Split a list of tags into a new_set and a gone_set.

    A leading ``-`` marks a tag for removal (it is stripped); every other
    tag is added.
    """
    gone_tags = set(tag[1:] for tag in tags if tag[0] == '-')
    new_tags = set(tag for tag in tags if tag[0] != '-')
    return new_tags, gone_tags
180
181
class DiscardStream(object):
    """A filelike object which discards what is written to it."""

    def write(self, bytes):
        # Intentional no-op: used as the default forward_stream in
        # TestProtocolServer so forwarded subunit lines are dropped.
        pass
187
188
class _ParserState(object):
    """State for the subunit parser.

    This base class holds the shared line-dispatch logic; the default
    handling for every directive is to treat it as ordinary stdout output.
    Subclasses override the handlers that are meaningful in their state.
    """

    def __init__(self, parser):
        self.parser = parser
        # Directive keywords are encoded once here rather than on every
        # received line.
        self._test_sym = (_b('test'), _b('testing'))
        self._colon_sym = _b(':')
        self._error_sym = (_b('error'),)
        self._failure_sym = (_b('failure'),)
        self._progress_sym = (_b('progress'),)
        self._skip_sym = _b('skip')
        self._success_sym = (_b('success'), _b('successful'))
        self._tags_sym = (_b('tags'),)
        self._time_sym = (_b('time'),)
        self._xfail_sym = (_b('xfail'),)
        self._uxsuccess_sym = (_b('uxsuccess'),)
        self._start_simple = _u(" [")
        self._start_multipart = _u(" [ multipart")

    def addError(self, offset, line):
        """An 'error:' directive has been read."""
        self.parser.stdOutLineReceived(line)

    def addExpectedFail(self, offset, line):
        """An 'xfail:' directive has been read."""
        self.parser.stdOutLineReceived(line)

    def addFailure(self, offset, line):
        """A 'failure:' directive has been read."""
        self.parser.stdOutLineReceived(line)

    def addUnexpectedSuccess(self, offset, line):
        """A 'uxsuccess:' directive has been read.

        lineReceived() dispatches 'uxsuccess:' lines here, so the base
        state must define this handler; without it a uxsuccess directive
        arriving outside a test raised AttributeError instead of being
        passed through to stdout like the other outcome directives.
        """
        self.parser.stdOutLineReceived(line)

    def addSkip(self, offset, line):
        """A 'skip:' directive has been read."""
        self.parser.stdOutLineReceived(line)

    def addSuccess(self, offset, line):
        """A 'success:' directive has been read."""
        self.parser.stdOutLineReceived(line)

    def lineReceived(self, line):
        """a line has been received."""
        parts = line.split(None, 1)
        # Require "keyword rest" with no leading whitespace before the
        # keyword; anything else is passed through as stdout.
        if len(parts) == 2 and line.startswith(parts[0]):
            cmd, rest = parts
            offset = len(cmd) + 1
            cmd = cmd.rstrip(self._colon_sym)
            if cmd in self._test_sym:
                self.startTest(offset, line)
            elif cmd in self._error_sym:
                self.addError(offset, line)
            elif cmd in self._failure_sym:
                self.addFailure(offset, line)
            elif cmd in self._progress_sym:
                self.parser._handleProgress(offset, line)
            elif cmd in self._skip_sym:
                self.addSkip(offset, line)
            elif cmd in self._success_sym:
                self.addSuccess(offset, line)
            elif cmd in self._tags_sym:
                self.parser._handleTags(offset, line)
                self.parser.subunitLineReceived(line)
            elif cmd in self._time_sym:
                self.parser._handleTime(offset, line)
                self.parser.subunitLineReceived(line)
            elif cmd in self._xfail_sym:
                self.addExpectedFail(offset, line)
            elif cmd in self._uxsuccess_sym:
                self.addUnexpectedSuccess(offset, line)
            else:
                self.parser.stdOutLineReceived(line)
        else:
            self.parser.stdOutLineReceived(line)

    def lostConnection(self):
        """Connection lost."""
        self.parser._lostConnectionInTest(_u('unknown state of '))

    def startTest(self, offset, line):
        """A test start command received."""
        self.parser.stdOutLineReceived(line)
269
270
class _InTest(_ParserState):
    """State for the subunit parser after reading a test: directive."""

    def _outcome(self, offset, line, no_details, details_state):
        """An outcome directive has been read.

        :param no_details: Callable to call when no details are presented.
        :param details_state: The state to switch to for details
            processing of this outcome.
        """
        # Strip the trailing newline; the remainder must name the test that
        # is currently in progress (optionally with a details opener).
        test_name = line[offset:-1].decode('utf8')
        if self.parser.current_test_description == test_name:
            # Bare outcome, no details: report it and leave the test now.
            self.parser._state = self.parser._outside_test
            self.parser.current_test_description = None
            no_details()
            self.parser.client.stopTest(self.parser._current_test)
            self.parser._current_test = None
            self.parser.subunitLineReceived(line)
        elif self.parser.current_test_description + self._start_simple == \
            test_name:
            # "name [" suffix: a simple bracketed details section follows.
            self.parser._state = details_state
            details_state.set_simple()
            self.parser.subunitLineReceived(line)
        elif self.parser.current_test_description + self._start_multipart == \
            test_name:
            # "name [ multipart" suffix: MIME-like details follow.
            self.parser._state = details_state
            details_state.set_multipart()
            self.parser.subunitLineReceived(line)
        else:
            # Directive is not about the current test: treat as stdout.
            self.parser.stdOutLineReceived(line)

    def _error(self):
        # No-details reporter for 'error:'.
        self.parser.client.addError(self.parser._current_test,
            details={})

    def addError(self, offset, line):
        """An 'error:' directive has been read."""
        self._outcome(offset, line, self._error,
            self.parser._reading_error_details)

    def _xfail(self):
        # No-details reporter for 'xfail:'.
        self.parser.client.addExpectedFailure(self.parser._current_test,
            details={})

    def addExpectedFail(self, offset, line):
        """An 'xfail:' directive has been read."""
        self._outcome(offset, line, self._xfail,
            self.parser._reading_xfail_details)

    def _uxsuccess(self):
        # No-details reporter for 'uxsuccess:'.
        self.parser.client.addUnexpectedSuccess(self.parser._current_test)

    def addUnexpectedSuccess(self, offset, line):
        """A 'uxsuccess:' directive has been read."""
        self._outcome(offset, line, self._uxsuccess,
            self.parser._reading_uxsuccess_details)

    def _failure(self):
        # No-details reporter for 'failure:'.
        self.parser.client.addFailure(self.parser._current_test, details={})

    def addFailure(self, offset, line):
        """A 'failure:' directive has been read."""
        self._outcome(offset, line, self._failure,
            self.parser._reading_failure_details)

    def _skip(self):
        # No-details reporter for 'skip:'.
        self.parser.client.addSkip(self.parser._current_test, details={})

    def addSkip(self, offset, line):
        """A 'skip:' directive has been read."""
        self._outcome(offset, line, self._skip,
            self.parser._reading_skip_details)

    def _succeed(self):
        # No-details reporter for 'success:'.
        self.parser.client.addSuccess(self.parser._current_test, details={})

    def addSuccess(self, offset, line):
        """A 'success:' directive has been read."""
        self._outcome(offset, line, self._succeed,
            self.parser._reading_success_details)

    def lostConnection(self):
        """Connection lost."""
        self.parser._lostConnectionInTest(_u(''))
355
356
class _OutSideTest(_ParserState):
    """State for the subunit parser outside of a test context."""

    def lostConnection(self):
        """Connection lost while no test was running: nothing to report."""

    def startTest(self, offset, line):
        """A test start command received."""
        parser = self.parser
        parser._state = parser._in_test
        # Everything after the keyword, minus the newline, names the test.
        name = line[offset:-1].decode('utf8')
        parser._current_test = RemotedTestCase(name)
        parser.current_test_description = name
        parser.client.startTest(parser._current_test)
        parser.subunitLineReceived(line)
371
372
class _ReadingDetails(_ParserState):
    """Common logic for reading state details."""

    def endDetails(self):
        """The end of a details section has been reached."""
        # Switch state before reporting so callbacks fired by the outcome
        # observe the parser outside the test.
        self.parser._state = self.parser._outside_test
        self.parser.current_test_description = None
        self._report_outcome()
        self.parser.client.stopTest(self.parser._current_test)

    def lineReceived(self, line):
        """a line has been received."""
        # Feed the details parser and forward the raw line downstream.
        self.details_parser.lineReceived(line)
        self.parser.subunitLineReceived(line)

    def lostConnection(self):
        """Connection lost."""
        self.parser._lostConnectionInTest(_u('%s report of ') %
            self._outcome_label())

    def _outcome_label(self):
        """The label to describe this outcome.

        Subclasses must override this to name the outcome they report.
        """
        raise NotImplementedError(self._outcome_label)

    def set_simple(self):
        """Start a simple details parser."""
        self.details_parser = details.SimpleDetailsParser(self)

    def set_multipart(self):
        """Start a multipart details parser."""
        self.details_parser = details.MultipartDetailsParser(self)
404
405
class _ReadingFailureDetails(_ReadingDetails):
    """State for the subunit parser when reading failure details."""

    def _report_outcome(self):
        # Hand the gathered details to the client as a failure.
        gathered = self.details_parser.get_details()
        self.parser.client.addFailure(self.parser._current_test,
                                      details=gathered)

    def _outcome_label(self):
        return "failure"
415
416
class _ReadingErrorDetails(_ReadingDetails):
    """State for the subunit parser when reading error details."""

    def _report_outcome(self):
        # Hand the gathered details to the client as an error.
        gathered = self.details_parser.get_details()
        self.parser.client.addError(self.parser._current_test,
                                    details=gathered)

    def _outcome_label(self):
        return "error"
426
427
class _ReadingExpectedFailureDetails(_ReadingDetails):
    """State for the subunit parser when reading xfail details."""

    def _report_outcome(self):
        # Hand the gathered details to the client as an expected failure.
        gathered = self.details_parser.get_details()
        self.parser.client.addExpectedFailure(self.parser._current_test,
                                              details=gathered)

    def _outcome_label(self):
        return "xfail"
437
438
class _ReadingUnexpectedSuccessDetails(_ReadingDetails):
    """State for the subunit parser when reading uxsuccess details."""

    def _report_outcome(self):
        # Hand the gathered details to the client as an unexpected success.
        gathered = self.details_parser.get_details()
        self.parser.client.addUnexpectedSuccess(self.parser._current_test,
                                                details=gathered)

    def _outcome_label(self):
        return "uxsuccess"
448
449
class _ReadingSkipDetails(_ReadingDetails):
    """State for the subunit parser when reading skip details."""

    def _report_outcome(self):
        # Skips default their details under the "skip" name.
        gathered = self.details_parser.get_details("skip")
        self.parser.client.addSkip(self.parser._current_test,
                                   details=gathered)

    def _outcome_label(self):
        return "skip"
459
460
class _ReadingSuccessDetails(_ReadingDetails):
    """State for the subunit parser when reading success details."""

    def _report_outcome(self):
        # Successes default their details under the "success" name.
        gathered = self.details_parser.get_details("success")
        self.parser.client.addSuccess(self.parser._current_test,
                                      details=gathered)

    def _outcome_label(self):
        return "success"
470
471
class TestProtocolServer(object):
    """A parser for subunit.

    :ivar tags: The current tags associated with the protocol stream.
    """

    def __init__(self, client, stream=None, forward_stream=None):
        """Create a TestProtocolServer instance.

        :param client: An object meeting the unittest.TestResult protocol.
        :param stream: The stream that lines received which are not part of the
            subunit protocol should be written to. This allows custom handling
            of mixed protocols. By default, sys.stdout will be used for
            convenience. It should accept bytes to its write() method.
        :param forward_stream: A stream to forward subunit lines to. This
            allows a filter to forward the entire stream while still parsing
            and acting on it. By default forward_stream is set to
            DiscardStream() and no forwarding happens.
        """
        self.client = ExtendedToOriginalDecorator(client)
        if stream is None:
            stream = sys.stdout
            if sys.version_info > (3, 0):
                # Python 3's sys.stdout is text mode; the protocol is bytes.
                stream = stream.buffer
        self._stream = stream
        self._forward_stream = forward_stream or DiscardStream()
        # state objects we can switch to
        self._in_test = _InTest(self)
        self._outside_test = _OutSideTest(self)
        self._reading_error_details = _ReadingErrorDetails(self)
        self._reading_failure_details = _ReadingFailureDetails(self)
        self._reading_skip_details = _ReadingSkipDetails(self)
        self._reading_success_details = _ReadingSuccessDetails(self)
        self._reading_xfail_details = _ReadingExpectedFailureDetails(self)
        self._reading_uxsuccess_details = _ReadingUnexpectedSuccessDetails(self)
        # start with outside test.
        self._state = self._outside_test
        # Avoid casts on every call
        self._plusminus = _b('+-')
        self._push_sym = _b('push')
        self._pop_sym = _b('pop')

    def _handleProgress(self, offset, line):
        """Process a progress directive."""
        line = line[offset:].strip()
        if line[0] in self._plusminus:
            # Leading +/-: relative adjustment of the remaining count.
            whence = PROGRESS_CUR
            delta = int(line)
        elif line == self._push_sym:
            whence = PROGRESS_PUSH
            delta = None
        elif line == self._pop_sym:
            whence = PROGRESS_POP
            delta = None
        else:
            # Bare number: absolute remaining-test count.
            whence = PROGRESS_SET
            delta = int(line)
        self.client.progress(delta, whence)

    def _handleTags(self, offset, line):
        """Process a tags command."""
        tags = line[offset:].decode('utf8').split()
        new_tags, gone_tags = tags_to_new_gone(tags)
        self.client.tags(new_tags, gone_tags)

    def _handleTime(self, offset, line):
        # Accept it, but do not do anything with it yet.
        try:
            event_time = iso8601.parse_date(line[offset:-1])
        except TypeError:
            # BUG FIX: this previously read sys.exec_info[1]; no such
            # attribute exists, so the error path itself raised
            # AttributeError. sys.exc_info() is the correct call.
            raise TypeError(_u("Failed to parse %r, got %r")
                % (line, sys.exc_info()[1]))
        self.client.time(event_time)

    def lineReceived(self, line):
        """Call the appropriate local method for the received line."""
        self._state.lineReceived(line)

    def _lostConnectionInTest(self, state_string):
        # Report an in-flight test as errored when the stream ends early.
        error_string = _u("lost connection during %stest '%s'") % (
            state_string, self.current_test_description)
        self.client.addError(self._current_test, RemoteError(error_string))
        self.client.stopTest(self._current_test)

    def lostConnection(self):
        """The input connection has finished."""
        self._state.lostConnection()

    def readFrom(self, pipe):
        """Blocking convenience API to parse an entire stream.

        :param pipe: A file-like object supporting readlines().
        :return: None.
        """
        for line in pipe.readlines():
            self.lineReceived(line)
        self.lostConnection()

    def _startTest(self, offset, line):
        """Internal call to change state machine. Override startTest()."""
        self._state.startTest(offset, line)

    def subunitLineReceived(self, line):
        # Lines recognised as subunit protocol are forwarded verbatim.
        self._forward_stream.write(line)

    def stdOutLineReceived(self, line):
        # Unrecognised lines pass straight through to the output stream.
        self._stream.write(line)
579
580
class TestProtocolClient(testresult.TestResult):
    """A TestResult which generates a subunit stream for a test run.

    # Get a TestSuite or TestCase to run
    suite = make_suite()
    # Create a stream (any object with a 'write' method). This should accept
    # bytes not strings: subunit is a byte orientated protocol.
    stream = file('tests.log', 'wb')
    # Create a subunit result object which will output to the stream
    result = subunit.TestProtocolClient(stream)
    # Optionally, to get timing data for performance analysis, wrap the
    # serialiser with a timing decorator
    result = subunit.test_results.AutoTimingTestResultDecorator(result)
    # Run the test suite reporting to the subunit result object
    suite.run(result)
    # Close the stream.
    stream.close()
    """

    def __init__(self, stream):
        testresult.TestResult.__init__(self)
        self._stream = stream
        _make_stream_binary(stream)
        # Protocol tokens, encoded to bytes once to avoid per-event
        # conversions while writing the stream.
        self._progress_fmt = _b("progress: ")
        self._bytes_eol = _b("\n")
        self._progress_plus = _b("+")
        self._progress_push = _b("push")
        self._progress_pop = _b("pop")
        self._empty_bytes = _b("")
        self._start_simple = _b(" [\n")
        self._end_simple = _b("]\n")

    def addError(self, test, error=None, details=None):
        """Report an error in test test.

        Only one of error and details should be provided: conceptually there
        are two separate methods:
            addError(self, test, error)
            addError(self, test, details)

        :param error: Standard unittest positional argument form - an
            exc_info tuple.
        :param details: New Testing-in-python drafted API; a dict from string
            to subunit.Content objects.
        """
        self._addOutcome("error", test, error=error, details=details)

    def addExpectedFailure(self, test, error=None, details=None):
        """Report an expected failure in test test.

        Only one of error and details should be provided: conceptually there
        are two separate methods:
            addExpectedFailure(self, test, error)
            addExpectedFailure(self, test, details)

        :param error: Standard unittest positional argument form - an
            exc_info tuple.
        :param details: New Testing-in-python drafted API; a dict from string
            to subunit.Content objects.
        """
        self._addOutcome("xfail", test, error=error, details=details)

    def addFailure(self, test, error=None, details=None):
        """Report a failure in test test.

        Only one of error and details should be provided: conceptually there
        are two separate methods:
            addFailure(self, test, error)
            addFailure(self, test, details)

        :param error: Standard unittest positional argument form - an
            exc_info tuple.
        :param details: New Testing-in-python drafted API; a dict from string
            to subunit.Content objects.
        """
        self._addOutcome("failure", test, error=error, details=details)

    def _addOutcome(self, outcome, test, error=None, details=None,
        error_permitted=True):
        """Report a failure in test test.

        Only one of error and details should be provided: conceptually there
        are two separate methods:
            addOutcome(self, test, error)
            addOutcome(self, test, details)

        :param outcome: A string describing the outcome - used as the
            event name in the subunit stream.
        :param error: Standard unittest positional argument form - an
            exc_info tuple.
        :param details: New Testing-in-python drafted API; a dict from string
            to subunit.Content objects.
        :param error_permitted: If True then one and only one of error or
            details must be supplied. If False then error must not be supplied
            and details is still optional. """
        self._stream.write(_b("%s: %s" % (outcome, test.id())))
        if error_permitted:
            if error is None and details is None:
                raise ValueError
        else:
            if error is not None:
                raise ValueError
        if error is not None:
            # Traceback form: a simple bracketed section of utf8 text.
            self._stream.write(self._start_simple)
            # XXX: this needs to be made much stricter, along the lines of
            # Martin[gz]'s work in testtools. Perhaps subunit can use that?
            for line in self._exc_info_to_unicode(error, test).splitlines():
                self._stream.write(("%s\n" % line).encode('utf8'))
        elif details is not None:
            self._write_details(details)
        else:
            self._stream.write(_b("\n"))
        if details is not None or error is not None:
            self._stream.write(self._end_simple)

    def addSkip(self, test, reason=None, details=None):
        """Report a skipped test."""
        if reason is None:
            self._addOutcome("skip", test, error=None, details=details)
        else:
            # Legacy reason-string form: emit an inline simple section.
            self._stream.write(_b("skip: %s [\n" % test.id()))
            self._stream.write(_b("%s\n" % reason))
            self._stream.write(self._end_simple)

    def addSuccess(self, test, details=None):
        """Report a success in a test."""
        self._addOutcome("successful", test, details=details, error_permitted=False)

    def addUnexpectedSuccess(self, test, details=None):
        """Report an unexpected success in test test.

        Details can optionally be provided: conceptually there
        are two separate methods:
            addUnexpectedSuccess(self, test)
            addUnexpectedSuccess(self, test, details)

        :param details: New Testing-in-python drafted API; a dict from string
            to subunit.Content objects.
        """
        self._addOutcome("uxsuccess", test, details=details,
            error_permitted=False)

    def startTest(self, test):
        """Mark a test as starting its test run."""
        super(TestProtocolClient, self).startTest(test)
        self._stream.write(_b("test: %s\n" % test.id()))
        self._stream.flush()

    def stopTest(self, test):
        # Flush so consumers see each test's events promptly.
        super(TestProtocolClient, self).stopTest(test)
        self._stream.flush()

    def progress(self, offset, whence):
        """Provide indication about the progress/length of the test run.

        :param offset: Information about the number of tests remaining. If
            whence is PROGRESS_CUR, then offset increases/decreases the
            remaining test count. If whence is PROGRESS_SET, then offset
            specifies exactly the remaining test count.
        :param whence: One of PROGRESS_CUR, PROGRESS_SET, PROGRESS_PUSH,
            PROGRESS_POP.
        """
        if whence == PROGRESS_CUR and offset > -1:
            # Positive relative offsets need an explicit '+' on the wire.
            prefix = self._progress_plus
            offset = _b(str(offset))
        elif whence == PROGRESS_PUSH:
            prefix = self._empty_bytes
            offset = self._progress_push
        elif whence == PROGRESS_POP:
            prefix = self._empty_bytes
            offset = self._progress_pop
        else:
            prefix = self._empty_bytes
            offset = _b(str(offset))
        self._stream.write(self._progress_fmt + prefix + offset +
            self._bytes_eol)

    def time(self, a_datetime):
        """Inform the client of the time.

        :param a_datetime: A datetime.datetime object.
        """
        # Normalise to UTC so the stream carries unambiguous timestamps.
        time = a_datetime.astimezone(iso8601.Utc())
        self._stream.write(_b("time: %04d-%02d-%02d %02d:%02d:%02d.%06dZ\n" % (
            time.year, time.month, time.day, time.hour, time.minute,
            time.second, time.microsecond)))

    def _write_details(self, details):
        """Output details to the stream.

        :param details: An extended details dict for a test outcome.
        """
        self._stream.write(_b(" [ multipart\n"))
        # Sorted for a deterministic stream regardless of dict ordering.
        for name, content in sorted(details.items()):
            self._stream.write(_b("Content-Type: %s/%s" %
                (content.content_type.type, content.content_type.subtype)))
            parameters = content.content_type.parameters
            if parameters:
                self._stream.write(_b(";"))
                param_strs = []
                for param, value in parameters.items():
                    param_strs.append("%s=%s" % (param, value))
                self._stream.write(_b(",".join(param_strs)))
            self._stream.write(_b("\n%s\n" % name))
            # Each part's payload is HTTP-chunked encoded.
            encoder = chunked.Encoder(self._stream)
            list(map(encoder.write, content.iter_bytes()))
            encoder.close()

    def done(self):
        """Obey the testtools result.done() interface."""
791
792
def RemoteError(description=_u("")):
    """Build an exc_info-style tuple describing a remote failure."""
    remote_exc = _StringException(description)
    return (_StringException, remote_exc, None)
795
796
class RemotedTestCase(unittest.TestCase):
    """A class to represent test cases run in child processes.

    Instances of this class are used to provide the Python test API a TestCase
    that can be printed to the screen, introspected for metadata and so on.
    However, as they are a simply a memoisation of a test that was actually
    run in the past by a separate process, they cannot perform any interactive
    actions.
    """

    def __init__(self, description):
        """Create a pseudo test case with description description."""
        self.__description = description

    def __eq__(self, other):
        # Two remoted cases are equal when they describe the same test.
        try:
            return self.__description == other.__description
        except AttributeError:
            return False

    def __hash__(self):
        # Defining __eq__ sets __hash__ to None on Python 3, which made
        # these instances unusable as dict keys / set members. Hash the
        # description so the hash is consistent with __eq__.
        return hash(self.__description)

    def error(self, label):
        # Remoted cases already ran elsewhere; interactive use is an error.
        raise NotImplementedError("%s on RemotedTestCases is not permitted." %
            label)

    def setUp(self):
        self.error("setUp")

    def tearDown(self):
        self.error("tearDown")

    def shortDescription(self):
        return self.__description

    def id(self):
        return "%s" % (self.__description,)

    def __str__(self):
        return "%s (%s)" % (self.__description, self._strclass())

    def __repr__(self):
        return "<%s description='%s'>" % \
            (self._strclass(), self.__description)

    def run(self, result=None):
        # Running a memoised test only reports that it cannot be run.
        if result is None: result = self.defaultTestResult()
        result.startTest(self)
        result.addError(self, RemoteError(_u("Cannot run RemotedTestCases.\n")))
        result.stopTest(self)

    def _strclass(self):
        cls = self.__class__
        return "%s.%s" % (cls.__module__, cls.__name__)
849
850
class ExecTestCase(unittest.TestCase):
    """A test case which runs external scripts for test fixtures."""

    def __init__(self, methodName='runTest'):
        """Create an instance that will use the named test method.

        The method must exist; its docstring names the external script to
        run, resolved relative to the module that defines the class.
        """
        unittest.TestCase.__init__(self, methodName)
        method = getattr(self, methodName)
        module_file = sys.modules[self.__class__.__module__].__file__
        self.script = join_dir(module_file, method.__doc__)

    def countTestCases(self):
        # One external script equals one test.
        return 1

    def run(self, result=None):
        if result is None:
            result = self.defaultTestResult()
        self._run(result)

    def debug(self):
        """Run the test without collecting errors in a TestResult"""
        self._run(testresult.TestResult())

    def _run(self, result):
        # Execute the script and parse its stdout as a subunit stream.
        server = TestProtocolServer(result)
        child = subprocess.Popen(self.script, shell=True,
            stdout=subprocess.PIPE)
        _make_stream_binary(child.stdout)
        captured = child.communicate()[0]
        server.readFrom(BytesIO(captured))
882
883
class IsolatedTestCase(unittest.TestCase):
    """A TestCase which executes in a forked process.

    Each test gets its own process, which has a performance overhead but will
    provide excellent isolation from global state (such as django configs,
    zope utilities and so on).
    """

    def run(self, result=None):
        # Fall back to the default result object when none is supplied.
        target = result if result is not None else self.defaultTestResult()
        run_isolated(unittest.TestCase, self, target)
895
896
class IsolatedTestSuite(unittest.TestSuite):
    """A TestSuite which runs its tests in a forked process.

    This decorator that will fork() before running the tests and report the
    results from the child process using a Subunit stream. This is useful for
    handling tests that mutate global state, or are testing C extensions that
    could crash the VM.
    """

    def run(self, result=None):
        # Unlike IsolatedTestCase, the default here is a testtools result.
        target = result if result is not None else testresult.TestResult()
        run_isolated(unittest.TestSuite, self, target)
909
910
def run_isolated(klass, self, result):
    """Run a test suite or case in a subprocess, using the run method on klass.

    :param klass: The class (e.g. unittest.TestCase or unittest.TestSuite)
        whose unbound ``run`` method is invoked on ``self`` in the child.
    :param self: The test case or suite instance to run.
    :param result: The result object the parent replays the child's
        subunit stream into.
    :return: ``result``, after the child's test activity has been replayed.
    """
    c2pread, c2pwrite = os.pipe()
    # fixme - error -> result
    # now fork
    pid = os.fork()
    if pid == 0:
        # Child: run the test, emitting a subunit stream on stdout.
        # Close parent's pipe ends
        os.close(c2pread)
        # Redirect fd 1 (stdout) into the pipe's write end.
        os.dup2(c2pwrite, 1)
        # Close pipe fds - fd 1 keeps the pipe's write end open.
        os.close(c2pwrite)

        # At this point fd 1 is redirected into the pipe; any direct
        # writes by the test itself would need filtering to escape ]'s
        # so they cannot corrupt the protocol.
        ### XXX: test and write that bit.
        stream = os.fdopen(1, 'wb')
        result = TestProtocolClient(stream)
        klass.run(self, result)
        stream.flush()
        sys.stderr.flush()
        # exit HARD, exit NOW - _exit skips atexit/stdio cleanup so
        # nothing further can be written to the stream.
        os._exit(0)
    else:
        # Parent: replay the child's stream into the caller's result.
        # Close child pipe ends
        os.close(c2pwrite)
        # hookup a protocol engine
        protocol = TestProtocolServer(result)
        fileobj = os.fdopen(c2pread, 'rb')
        # readFrom returns once the child closes its end (i.e. exits).
        protocol.readFrom(fileobj)
        os.waitpid(pid, 0)
        # TODO return code evaluation.
    return result
948
949
def TAP2SubUnit(tap, subunit):
    """Filter a TAP pipe into a subunit pipe.

    :param tap: A tap pipe/stream/file object, iterated line by line.
    :param subunit: A pipe/stream/file object to write subunit results to.
    :return: The exit code to exit with.
    """
    # Three-state machine: before the plan line, after it, and
    # "pass the rest of the stream through" (skipped file / bail out).
    BEFORE_PLAN = 0
    AFTER_PLAN = 1
    SKIP_STREAM = 2
    state = BEFORE_PLAN
    plan_start = 1
    plan_stop = 0
    def _skipped_test(subunit, plan_start):
        # Report a test number the TAP stream never emitted as an error.
        subunit.write('test test %d\n' % plan_start)
        subunit.write('error test %d [\n' % plan_start)
        subunit.write('test missing from TAP output\n')
        subunit.write(']\n')
        return plan_start + 1
    # Test data for the next test to emit; read (and `log` drained) by the
    # _emit_test closure below.
    test_name = None
    log = []
    result = None
    def _emit_test():
        "write out a test"
        if test_name is None:
            return
        subunit.write("test %s\n" % test_name)
        if not log:
            subunit.write("%s %s\n" % (result, test_name))
        else:
            subunit.write("%s %s [\n" % (result, test_name))
        if log:
            for line in log:
                subunit.write("%s\n" % line)
            subunit.write("]\n")
        del log[:]
    # FIX: all regexes below are now raw strings.  They were previously
    # plain strings relying on unrecognised escapes such as "\d" and "\!"
    # surviving verbatim, which raises DeprecationWarning on modern Python
    # and is slated to become a SyntaxError.  The patterns are unchanged.
    for line in tap:
        if state == BEFORE_PLAN:
            # Plan line: "<start>..<stop>" with an optional "# comment".
            match = re.match(r"(\d+)\.\.(\d+)\s*(?:#\s+(.*))?\n", line)
            if match:
                state = AFTER_PLAN
                _, plan_stop, comment = match.groups()
                plan_stop = int(plan_stop)
                if plan_start > plan_stop and plan_stop == 0:
                    # skipped file
                    state = SKIP_STREAM
                    subunit.write("test file skip\n")
                    subunit.write("skip file skip [\n")
                    subunit.write("%s\n" % comment)
                    subunit.write("]\n")
                continue
        # not a plan line, or have seen one before
        match = re.match(
            r"(ok|not ok)(?:\s+(\d+)?)?(?:\s+([^#]*[^#\s]+)\s*)?"
            r"(?:\s+#\s+(TODO|SKIP|skip|todo)(?:\s+(.*))?)?\n", line)
        if match:
            # new test, emit current one.
            _emit_test()
            status, number, description, directive, directive_comment = \
                match.groups()
            if status == 'ok':
                result = 'success'
            else:
                result = "failure"
            if description is None:
                description = ''
            else:
                description = ' ' + description
            if directive is not None:
                if directive.upper() == 'TODO':
                    result = 'xfail'
                elif directive.upper() == 'SKIP':
                    result = 'skip'
                if directive_comment is not None:
                    log.append(directive_comment)
            if number is not None:
                number = int(number)
                # Fill any numbering gap with "missing" error tests.
                while plan_start < number:
                    plan_start = _skipped_test(subunit, plan_start)
            test_name = "test %d%s" % (plan_start, description)
            plan_start += 1
            continue
        match = re.match(r"Bail out!(?:\s*(.*))?\n", line)
        if match:
            reason, = match.groups()
            if reason is None:
                extra = ''
            else:
                extra = ' %s' % reason
            _emit_test()
            test_name = "Bail out!%s" % extra
            result = "error"
            state = SKIP_STREAM
            continue
        match = re.match(r"#.*\n", line)
        if match:
            # Diagnostic line: attach to the current test's log.
            log.append(line[:-1])
            continue
        # Anything unrecognised passes straight through.
        subunit.write(line)
    _emit_test()
    while plan_start <= plan_stop:
        # record missed tests
        plan_start = _skipped_test(subunit, plan_start)
    return 0
1053
1054
def tag_stream(original, filtered, tags):
    """Alter tags on a stream.

    :param original: The input stream.
    :param filtered: The output stream.
    :param tags: The tags to apply. As in a normal stream - a list of 'TAG' or
        '-TAG' commands.

    A 'TAG' command will add the tag to the output stream,
    and override any existing '-TAG' command in that stream.
    Specifically:
     * A global 'tags: TAG' will be added to the start of the stream.
     * Any tags commands with -TAG will have the -TAG removed.

    A '-TAG' command will remove the TAG command from the stream.
    Specifically:
     * A 'tags: -TAG' command will be added to the start of the stream.
     * Any 'tags: TAG' command will have 'TAG' removed from it.
    Additionally, any redundant tagging commands (adding a tag globally
    present, or removing a tag globally removed) are stripped as a
    by-product of the filtering.
    :return: 0
    """
    new_tags, gone_tags = tags_to_new_gone(tags)
    def write_tags(new_tags, gone_tags):
        # Emit one 'tags:' line covering both additions and removals.
        if new_tags or gone_tags:
            filtered.write("tags: " + ' '.join(new_tags))
            if gone_tags:
                for tag in gone_tags:
                    # BUG FIX: removals were written as "-" + tag with no
                    # separator, gluing the first removal onto the last
                    # added tag ("tags: a-b") and the removals onto each
                    # other; a leading space keeps each token distinct.
                    filtered.write(" -" + tag)
            filtered.write("\n")
    write_tags(new_tags, gone_tags)
    # TODO: use the protocol parser and thus don't mangle test comments.
    for line in original:
        if line.startswith("tags:"):
            line_tags = line[5:].split()
            line_new, line_gone = tags_to_new_gone(line_tags)
            # Global commands win: drop local additions of globally removed
            # tags and local removals of globally added tags.
            line_new = line_new - gone_tags
            line_gone = line_gone - new_tags
            write_tags(line_new, line_gone)
        else:
            filtered.write(line)
    return 0
1098
1099
class ProtocolTestCase(object):
    """Subunit wire protocol to unittest.TestCase adapter.

    ProtocolTestCase honours the core of ``unittest.TestCase`` protocol -
    calling a ProtocolTestCase or invoking the run() method will make a 'test
    run' happen. The 'test run' will simply be a replay of the test activity
    that has been encoded into the stream. The ``unittest.TestCase`` ``debug``
    and ``countTestCases`` methods are not supported because there isn't a
    sensible mapping for those methods.

    # Get a stream (any object with a readline() method), in this case the
    # stream output by the example from ``subunit.TestProtocolClient``.
    stream = file('tests.log', 'rb')
    # Create a parser which will read from the stream and emit
    # activity to a unittest.TestResult when run() is called.
    suite = subunit.ProtocolTestCase(stream)
    # Create a result object to accept the contents of that stream.
    result = unittest._TextTestResult(sys.stdout)
    # 'run' the tests - process the stream and feed its contents to result.
    suite.run(result)
    stream.close()

    :seealso: TestProtocolServer (the subunit wire protocol parser).
    """

    def __init__(self, stream, passthrough=None, forward=False):
        """Create a ProtocolTestCase reading from stream.

        :param stream: A filelike object which a subunit stream can be read
            from.
        :param passthrough: A stream pass non subunit input on to. If not
            supplied, the TestProtocolServer default is used.
        :param forward: A stream to pass subunit input on to. If not supplied
            subunit input is not forwarded.
        """
        self._stream = stream
        _make_stream_binary(stream)
        self._passthrough = passthrough
        self._forward = forward

    def __call__(self, result=None):
        return self.run(result)

    def run(self, result=None):
        if result is None:
            # BUG FIX: this class does not inherit from unittest.TestCase,
            # so the previous ``self.defaultTestResult()`` raised
            # AttributeError whenever run() was called without a result.
            # Fall back to a plain TestResult, as IsolatedTestSuite does.
            result = testresult.TestResult()
        protocol = TestProtocolServer(result, self._passthrough, self._forward)
        line = self._stream.readline()
        while line:
            protocol.lineReceived(line)
            line = self._stream.readline()
        protocol.lostConnection()
1152
1153
class TestResultStats(testresult.TestResult):
    """A pyunit TestResult interface implementation for making statistics.

    :ivar total_tests: The total tests seen.
    :ivar passed_tests: The tests that passed.
    :ivar failed_tests: The tests that failed.
    :ivar seen_tags: The tags seen across all tests.
    """

    def __init__(self, stream):
        """Create a TestResultStats which outputs to stream."""
        testresult.TestResult.__init__(self)
        self._stream = stream
        self.failed_tests = 0
        self.skipped_tests = 0
        self.seen_tags = set()

    @property
    def total_tests(self):
        # The base class already counts every started test.
        return self.testsRun

    @property
    def passed_tests(self):
        # Everything that neither failed/errored nor was skipped.
        return self.total_tests - self.failed_tests - self.skipped_tests

    def addError(self, test, err, details=None):
        self.failed_tests += 1

    def addFailure(self, test, err, details=None):
        self.failed_tests += 1

    def addSkip(self, test, reason, details=None):
        self.skipped_tests += 1

    def tags(self, new_tags, gone_tags):
        """Accumulate the seen tags."""
        self.seen_tags.update(new_tags)

    def wasSuccessful(self):
        """Tells whether or not this result was a success"""
        return self.failed_tests == 0

    def formatStats(self):
        # Render the accumulated counters as a short plain-text report.
        write = self._stream.write
        write("Total tests: %5d\n" % self.total_tests)
        write("Passed tests: %5d\n" % self.passed_tests)
        write("Failed tests: %5d\n" % self.failed_tests)
        write("Skipped tests: %5d\n" % self.skipped_tests)
        write("Seen tags: %s\n" % (", ".join(sorted(self.seen_tags))))
1203
1204
def get_default_formatter():
    """Obtain the default formatter to write to.

    :return: A file-like object.
    """
    # An external formatter command may be configured via the environment;
    # if so, pipe output through it.
    command = os.getenv("SUBUNIT_FORMATTER")
    if command:
        return os.popen(command, "w")
    # Otherwise write to stdout - the byte-level buffer on Python 3, where
    # sys.stdout itself is text mode.
    stream = sys.stdout
    if sys.version_info > (3, 0):
        stream = stream.buffer
    return stream
1218
1219
# Exception raised by stream.fileno() when there is no underlying OS file
# descriptor: io.UnsupportedOperation on Python 3, while Python 2 file-like
# objects without a fileno() method raise AttributeError instead.
if sys.version_info > (3, 0):
    from io import UnsupportedOperation as _NoFilenoError
else:
    _NoFilenoError = AttributeError
1224
def read_test_list(path):
    """Read a list of test ids from a file on disk.

    :param path: Path to the file.
    :return: A list of test ids, one per line, without trailing newlines.
    """
    # BUG FIX: the file was opened in binary mode ('rb'), so on Python 3
    # each line is bytes and ``l.rstrip("\n")`` raised
    # "TypeError: a bytes-like object is required, not 'str'".
    # Test ids are text, so open in text mode.
    f = open(path, 'r')
    try:
        return [l.rstrip("\n") for l in f.readlines()]
    finally:
        f.close()
1236
1237
1238def _make_stream_binary(stream):
1239 """Ensure that a stream will be binary safe. See _make_binary_on_windows."""
1240 try:
1241 fileno = stream.fileno()
1242 except _NoFilenoError:
1243 return
1244 _make_binary_on_windows(fileno)
1245
1246def _make_binary_on_windows(fileno):
1247 """Win32 mangles \r\n to \n and that breaks streams. See bug lp:505078."""
1248 if sys.platform == "win32":
1249 import msvcrt
1250 msvcrt.setmode(fileno, os.O_BINARY)
01251
=== added file 'python-for-subunit2junitxml/subunit/chunked.py'
--- python-for-subunit2junitxml/subunit/chunked.py 1970-01-01 00:00:00 +0000
+++ python-for-subunit2junitxml/subunit/chunked.py 2013-05-28 06:50:46 +0000
@@ -0,0 +1,185 @@
1#
2# subunit: extensions to python unittest to get test results from subprocesses.
3# Copyright (C) 2005 Robert Collins <robertc@robertcollins.net>
4# Copyright (C) 2011 Martin Pool <mbp@sourcefrog.net>
5#
6# Licensed under either the Apache License, Version 2.0 or the BSD 3-clause
7# license at the users choice. A copy of both licenses are available in the
8# project source as Apache-2.0 and BSD. You may not use this file except in
9# compliance with one of these two licences.
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under these licenses is distributed on an "AS IS" BASIS, WITHOUT
13# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14# license you chose for the specific language governing permissions and
15# limitations under that license.
16#
17
18"""Encoder/decoder for http style chunked encoding."""
19
20from testtools.compat import _b
21
22empty = _b('')
23
24class Decoder(object):
25 """Decode chunked content to a byte stream."""
26
27 def __init__(self, output, strict=True):
28 """Create a decoder decoding to output.
29
30 :param output: A file-like object. Bytes written to the Decoder are
31 decoded to strip off the chunking and written to the output.
32 Up to a full write worth of data or a single control line may be
33 buffered (whichever is larger). The close method should be called
34 when no more data is available, to detect short streams; the
35 write method will return none-None when the end of a stream is
36 detected. The output object must accept bytes objects.
37
38 :param strict: If True (the default), the decoder will not knowingly
39 accept input that is not conformant to the HTTP specification.
40 (This does not imply that it will catch every nonconformance.)
41 If False, it will accept incorrect input that is still
42 unambiguous.
43 """
44 self.output = output
45 self.buffered_bytes = []
46 self.state = self._read_length
47 self.body_length = 0
48 self.strict = strict
49 self._match_chars = _b("0123456789abcdefABCDEF\r\n")
50 self._slash_n = _b('\n')
51 self._slash_r = _b('\r')
52 self._slash_rn = _b('\r\n')
53 self._slash_nr = _b('\n\r')
54
55 def close(self):
56 """Close the decoder.
57
58 :raises ValueError: If the stream is incomplete ValueError is raised.
59 """
60 if self.state != self._finished:
61 raise ValueError("incomplete stream")
62
63 def _finished(self):
64 """Finished reading, return any remaining bytes."""
65 if self.buffered_bytes:
66 buffered_bytes = self.buffered_bytes
67 self.buffered_bytes = []
68 return empty.join(buffered_bytes)
69 else:
70 raise ValueError("stream is finished")
71
    def _read_body(self):
        """Pass body bytes to the output.

        Consumes buffered segments until the current chunk's body_length
        is exhausted, then returns control to the header-parsing state.
        """
        while self.body_length and self.buffered_bytes:
            if self.body_length >= len(self.buffered_bytes[0]):
                # The whole first segment belongs to this chunk's body.
                self.output.write(self.buffered_bytes[0])
                self.body_length -= len(self.buffered_bytes[0])
                del self.buffered_bytes[0]
                # No more data available.
                if not self.body_length:
                    self.state = self._read_length
            else:
                # Segment straddles the chunk boundary: emit only the body
                # portion and keep the remainder buffered for the next
                # chunk header.
                self.output.write(self.buffered_bytes[0][:self.body_length])
                self.buffered_bytes[0] = \
                    self.buffered_bytes[0][self.body_length:]
                self.body_length = 0
                self.state = self._read_length
        # Tail-dispatch so any bytes left over (the next header) are
        # consumed by the new state immediately.
        return self.state()
89
90 def _read_length(self):
91 """Try to decode a length from the bytes."""
92 count_chars = []
93 for bytes in self.buffered_bytes:
94 for pos in range(len(bytes)):
95 byte = bytes[pos:pos+1]
96 if byte not in self._match_chars:
97 break
98 count_chars.append(byte)
99 if byte == self._slash_n:
100 break
101 if not count_chars:
102 return
103 if count_chars[-1] != self._slash_n:
104 return
105 count_str = empty.join(count_chars)
106 if self.strict:
107 if count_str[-2:] != self._slash_rn:
108 raise ValueError("chunk header invalid: %r" % count_str)
109 if self._slash_r in count_str[:-2]:
110 raise ValueError("too many CRs in chunk header %r" % count_str)
111 self.body_length = int(count_str.rstrip(self._slash_nr), 16)
112 excess_bytes = len(count_str)
113 while excess_bytes:
114 if excess_bytes >= len(self.buffered_bytes[0]):
115 excess_bytes -= len(self.buffered_bytes[0])
116 del self.buffered_bytes[0]
117 else:
118 self.buffered_bytes[0] = self.buffered_bytes[0][excess_bytes:]
119 excess_bytes = 0
120 if not self.body_length:
121 self.state = self._finished
122 if not self.buffered_bytes:
123 # May not call into self._finished with no buffered data.
124 return empty
125 else:
126 self.state = self._read_body
127 return self.state()
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches