Merge lp:~bcsaller/charm-tools/composer into lp:charm-tools/1.6
Status: | Merged |
---|---|
Merged at revision: | 359 |
Proposed branch: | lp:~bcsaller/charm-tools/composer |
Merge into: | lp:charm-tools/1.6 |
Diff against target: |
12668 lines (+11533/-368) 117 files modified
.bzrignore (+3/-1) MANIFEST.in (+1/-0) Makefile (+14/-37) charmtools/compose/__init__.py (+466/-0) charmtools/compose/config.py (+113/-0) charmtools/compose/diff_match_patch.py (+1919/-0) charmtools/compose/fetchers.py (+117/-0) charmtools/compose/inspector.py (+101/-0) charmtools/compose/tactics.py (+449/-0) charmtools/utils.py (+518/-0) doc/source/compose-intro.md (+18/-0) doc/source/composer.md (+123/-0) ez_setup.py (+0/-272) helpers/python/charmhelpers/tests/test_charmhelpers.py (+1/-1) requirements.txt (+18/-13) scripts/packages.sh (+19/-0) scripts/test (+0/-7) setup.cfg (+8/-0) setup.py (+16/-16) tests/interfaces/mysql/interface.yaml (+1/-0) tests/interfaces/mysql/provides.py (+1/-0) tests/interfaces/mysql/requires.py (+1/-0) tests/test_charm_generate.py (+0/-7) tests/test_compose.py (+239/-0) tests/test_config.py (+29/-0) tests/test_juju_test.py (+0/-9) tests/test_utils.py (+43/-0) tests/trusty/a/README.md (+1/-0) tests/trusty/a/a (+1/-0) tests/trusty/b/README.md (+1/-0) tests/trusty/b/composer.yaml (+1/-0) tests/trusty/b/metadata.yaml (+11/-0) tests/trusty/c-reactive/README.md (+1/-0) tests/trusty/c-reactive/composer.yaml (+1/-0) tests/trusty/c-reactive/hooks/reactive/main.py (+6/-0) tests/trusty/c/README.md (+1/-0) tests/trusty/c/composer.yaml (+1/-0) tests/trusty/c/metadata.yaml (+11/-0) tests/trusty/chlayer/hooks/charmhelpers.pypi (+1/-0) tests/trusty/mysql/.bzrignore (+2/-0) tests/trusty/mysql/Makefile (+24/-0) tests/trusty/mysql/README.md (+133/-0) tests/trusty/mysql/charm-helpers.yaml (+9/-0) tests/trusty/mysql/config.yaml (+141/-0) tests/trusty/mysql/copyright (+17/-0) tests/trusty/mysql/hooks/charmhelpers/__init__.py (+38/-0) tests/trusty/mysql/hooks/charmhelpers/contrib/__init__.py (+15/-0) tests/trusty/mysql/hooks/charmhelpers/contrib/charmsupport/nrpe.py (+219/-0) tests/trusty/mysql/hooks/charmhelpers/contrib/charmsupport/volumes.py (+156/-0) tests/trusty/mysql/hooks/charmhelpers/contrib/database/mysql.py (+385/-0) tests/trusty/mysql/hooks/charmhelpers/contrib/network/__init__.py (+15/-0) tests/trusty/mysql/hooks/charmhelpers/contrib/network/ip.py (+450/-0) tests/trusty/mysql/hooks/charmhelpers/contrib/peerstorage/__init__.py (+148/-0) tests/trusty/mysql/hooks/charmhelpers/core/__init__.py (+15/-0) tests/trusty/mysql/hooks/charmhelpers/core/decorators.py (+41/-0) tests/trusty/mysql/hooks/charmhelpers/core/fstab.py (+134/-0) tests/trusty/mysql/hooks/charmhelpers/core/hookenv.py (+568/-0) tests/trusty/mysql/hooks/charmhelpers/core/host.py (+446/-0) tests/trusty/mysql/hooks/charmhelpers/core/services/__init__.py (+18/-0) tests/trusty/mysql/hooks/charmhelpers/core/services/base.py (+329/-0) tests/trusty/mysql/hooks/charmhelpers/core/services/helpers.py (+267/-0) tests/trusty/mysql/hooks/charmhelpers/core/strutils.py (+42/-0) tests/trusty/mysql/hooks/charmhelpers/core/sysctl.py (+56/-0) tests/trusty/mysql/hooks/charmhelpers/core/templating.py (+69/-0) tests/trusty/mysql/hooks/charmhelpers/core/unitdata.py (+477/-0) tests/trusty/mysql/hooks/charmhelpers/fetch/__init__.py (+439/-0) tests/trusty/mysql/hooks/charmhelpers/fetch/archiveurl.py (+161/-0) tests/trusty/mysql/hooks/charmhelpers/fetch/bzrurl.py (+78/-0) tests/trusty/mysql/hooks/charmhelpers/fetch/giturl.py (+71/-0) tests/trusty/mysql/hooks/common.py (+109/-0) tests/trusty/mysql/hooks/config-changed (+414/-0) tests/trusty/mysql/hooks/data-relation.py (+31/-0) tests/trusty/mysql/hooks/db-relation-broken (+21/-0) tests/trusty/mysql/hooks/db-relation-joined (+87/-0) tests/trusty/mysql/hooks/ha_relations.py (+163/-0) 
tests/trusty/mysql/hooks/install (+49/-0) tests/trusty/mysql/hooks/master-relation-changed (+95/-0) tests/trusty/mysql/hooks/monitors-relation-broken (+8/-0) tests/trusty/mysql/hooks/monitors-relation-departed (+3/-0) tests/trusty/mysql/hooks/monitors-relation-joined (+9/-0) tests/trusty/mysql/hooks/monitors.common.bash (+8/-0) tests/trusty/mysql/hooks/munin-relation-changed (+26/-0) tests/trusty/mysql/hooks/munin-relation-joined (+6/-0) tests/trusty/mysql/hooks/nrpe_relations.py (+91/-0) tests/trusty/mysql/hooks/shared_db_relations.py (+153/-0) tests/trusty/mysql/hooks/slave-relation-broken (+11/-0) tests/trusty/mysql/hooks/slave-relation-changed (+89/-0) tests/trusty/mysql/hooks/slave-relation-joined (+2/-0) tests/trusty/mysql/hooks/start (+5/-0) tests/trusty/mysql/hooks/stop (+3/-0) tests/trusty/mysql/hooks/upgrade-charm (+27/-0) tests/trusty/mysql/icon.svg (+335/-0) tests/trusty/mysql/keys/repo.percona.com (+30/-0) tests/trusty/mysql/metadata.yaml (+43/-0) tests/trusty/mysql/monitors.yaml (+13/-0) tests/trusty/mysql/revision (+1/-0) tests/trusty/mysql/scripts/add_to_cluster (+13/-0) tests/trusty/mysql/scripts/charm_helpers_sync.py (+225/-0) tests/trusty/mysql/scripts/mysql_backup.sh (+30/-0) tests/trusty/mysql/scripts/remove_from_cluster (+4/-0) tests/trusty/mysql/templates/apparmor.j2 (+15/-0) tests/trusty/mysql/templates/mysql_backup.j2 (+12/-0) tests/trusty/mysql/tests/00-setup (+12/-0) tests/trusty/mysql/tests/15-configs (+77/-0) tests/trusty/mysql/unit_tests/test_mysql_common.py (+18/-0) tests/trusty/tester/README.md (+1/-0) tests/trusty/tester/composer.yaml (+8/-0) tests/trusty/tester/generate/custom.py (+17/-0) tests/trusty/tester/hooks/start (+1/-0) tests/trusty/tester/metadata.yaml (+14/-0) tests/trusty/use-layers/README.md (+1/-0) tests/trusty/use-layers/composer.yaml (+1/-0) tests/trusty/use-layers/hooks/reactive/main.py (+6/-0) tests_functional/add/test.sh (+2/-2) tests_functional/create/test.sh (+4/-2) tests_functional/proof/record.sh (+1/-1) tox.ini (+21/-0) |
To merge this branch: | bzr merge lp:~bcsaller/charm-tools/composer |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Tim Van Steenburgh | community | | Approve |
Cory Johns | community | | Needs Fixing |
Marco Ceppi | | | Pending |
Review via email: mp+266281@code.launchpad.net |
Commit message
Description of the change
This adds the composer feature and ports the project to use tox.
Cory Johns (johnsca) wrote:
Cory Johns (johnsca):
Adam Israel (aisrael) wrote:
Hi Ben,
Per our earlier conversation, I'd also like to see some documentation on how a charm author would consider and use composer. I'm very excited to see that myself and to give composer a spin.
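For reference, the compose entry point added in charmtools/compose/__init__.py can also be driven straight from Python, which may help when writing that documentation. This is only a minimal sketch (the series, charm name, and paths are illustrative) and is equivalent to running `charm compose -s trusty -n my-charm -o /tmp/out trusty/my-charm`:

# Minimal sketch: feed arguments to the same argparse-based main() that the
# `charm compose` command uses. Series, name, and paths below are illustrative.
from charmtools.compose import main

main([
    "-s", "trusty",       # target series (the branch's default)
    "-n", "my-charm",     # charm name; defaults to the top layer's directory or metadata.yaml name
    "-o", "/tmp/out",     # output repo; defaults to JUJU_REPOSITORY or the charm directory
    "trusty/my-charm",    # the top layer to compose (a layer directory, ideally with a composer.yaml)
])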
- 358. By Benjamin Saller
- 359. By Benjamin Saller: various composer fixes
- 360. By Benjamin Saller: without the pdb
- 361. By Benjamin Saller: show paths and so on with -l DEBUG, tests do less actual remote work
- 362. By Benjamin Saller: compose cli help a little better
- 363. By Benjamin Saller: fix bzrignore
- 364. By Benjamin Saller: fix typo
Charles Butler (lazypower) wrote:
Sorry this took me so long to circle back, but I've tried this branch of charm tools and it appears the manifest is not fetching all the dependencies.
What I did:
bzr branch lp:~bcsaller/charm-tools/composer/
virtualenv .venv
source .venv/bin/activate
pip install ./
charm compose -h
ImportError: No module named path
pip install path.py
charm compose -h
ImportError: No module named otherstuf
pip install otherstuf
charm compose -h
OSError: [Errno 2] No such file or directory: '/home/
Once I ran through that dependency hoop, compose appears to be sorted and available.
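For what it's worth, those ImportErrors line up with runtime imports of the new compose code visible in the preview diff (path.py and otherstuf, plus blessings, ruamel.yaml, and pathspec), which is what revision 365 ("fix deps in setup") and Cory's follow-up MP address. A rough, purely illustrative sketch of declaring them in setup.py (not the exact change in this branch):

# Sketch only: make `pip install ./` pull in the compose runtime deps.
# The package name and the unpinned dependency list are illustrative.
from setuptools import setup, find_packages

setup(
    name="charm-tools",
    packages=find_packages(),
    install_requires=[
        "path.py",
        "otherstuf",
        "blessings",
        "ruamel.yaml",
        "pathspec",
        "PyYAML",
        "requests",
    ],
)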
Cory Johns (johnsca) wrote:
Fix for the missing deps and help error here: https:/
Cory Johns (johnsca) wrote:
Updated my most recent MP to fix the default value for --name when composing current dir (e.g., "charm compose ." or just "charm compose")
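The relevant logic is the Composer.name property in the preview diff: with no -n/--name it falls back to the charm directory's basename (resolved through abspath(), so "charm compose ." picks up the real directory name) and prefers the name field of an existing metadata.yaml. A rough illustration of that resolution order, with a hypothetical charm directory:

# Mirrors Composer.name from the preview diff; charm_dir and its metadata.yaml
# contents are hypothetical.
import yaml
from path import path

def default_charm_name(charm_dir="."):
    # Fall back to the directory name; abspath() is what makes "." resolve sensibly.
    name = str(path(charm_dir).abspath().name)
    # Prefer an explicit name from metadata.yaml when the top layer provides one.
    md = path(charm_dir) / "metadata.yaml"
    if md.exists():
        declared = (yaml.load(md.open()) or {}).get("name")
        if declared:
            name = declared
    return name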
- 365. By Benjamin Saller: fix deps in setup, better searching for charm.name
- 366. By Benjamin Saller: change default interface address to public one
- 367. By Benjamin Saller: update to work with real DNS and service
- 368. By Benjamin Saller: remove find name call
- 369. By Benjamin Saller: various fixes around naming and patch to install to ignore existing deps, also depend on modern pip
- 370. By Benjamin Saller: repair tests
- 371. By Benjamin Saller: fix/remove broken tests w/updated bundletester
- 372. By Benjamin Saller: patch for inspect to work with more varied naming
- 373. By Benjamin Saller: force key order on metadata.yaml merges
- 374. By Benjamin Saller: fix tests (to reflect remote changes to basic layer) and ordered metadata rendering
- 375. By Benjamin Saller: update notes on workflow
- 376. By Benjamin Saller: various fixes and cleanups. Change installer to do better signing
- 377. By Benjamin Saller: merge trunk
Tim Van Steenburgh (tvansteenburgh) wrote:
I'm eager to merge this but can't get the tests to run. I always run `make check` on charm-tools since that includes the integration tests as well. According to the Makefile that should still work, but I get:
tvansteenburgh@
find . -name '*.py[co]' -delete
find . -type f -name '*~' -delete
find . -name '*.bak' -delete
rm -rf bin include lib local man dependencies
tvansteenburgh@
bzr checkout lp:~juju-jitsu/charm-tools/dependencies
tox --develop
py27 create: /tmp/charm-
py27 installdeps: -r/tmp/
ERROR: invocation failed, logfile: /tmp/charm-
ERROR: actionid=py27
msg=getenv
cmdargs=
env={'BYOBU_TTY': '/dev/pts/1', 'UPSTART_EVENTS': 'started starting', 'SHELL': '/bin/bash', 'XDG_DATA_DIRS': '/usr/share/
Tim Van Steenburgh (tvansteenburgh) wrote:
I got integration tests to run by changing this in the Makefile:
-build: deps develop
+build: deps
But then I got a bunch of legit failures from the tests:
2 tvansteenburgh@
tests_functiona
Fri Aug 28 11:04:24 EDT 2015: Testing ch_apparmor_
Fri Aug 28 11:04:24 EDT 2015: Testing ch_type_hash...PASS
Fri Aug 28 11:04:24 EDT 2015: Testing ch_is_url...PASS
Fri Aug 28 11:04:24 EDT 2015: Testing Testing ch_is_ip...PASS
Fri Aug 28 11:04:24 EDT 2015: Testing Testing ch_get_ip...PASS
Fri Aug 28 11:04:24 EDT 2015: Starting SimpleHTTPServer in /tmp/charm-
Fri Aug 28 11:04:24 EDT 2015: Looping wget until webserver responds...
Fri Aug 28 11:04:25 EDT 2015: Attempt 1 succeeded.
Fri Aug 28 11:04:25 EDT 2015: Creating temp data file
Fri Aug 28 11:04:25 EDT 2015: creating gzipped test data
Fri Aug 28 11:04:25 EDT 2015: Testing ch_get_file...PASS
Fri Aug 28 11:04:25 EDT 2015: Shutting down webserver...DONE
Fri Aug 28 11:04:25 EDT 2015: Printing server log
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET /testdata.txt HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET /testdata.txt HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET /testdata.txt HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET /testdata.txt.gz HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET /testdata.txt.gz HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET /testdata.txt.gz HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:25] "GET /testdata.txt HTTP/1.1" 200 -
Fri Aug 28 11:04:25 EDT 2015: Testing ch_unit_name...PASS
Fri Aug 28 11:04:25 EDT 2015: Testing ch_unit_id...PASS
Fri Aug 28 11:04:25 EDT 2015: Testing ch_my_unit_
Test shell helpers with bash
bash tests_functiona
|| bash -x tests_functiona
Fri Aug 28 11:04:25 EDT 2015: Testing ch_apparmor_
Fri Aug 28 11:04:25 EDT 2015: Testing ch_type_hash...PASS
Fri Aug 28 11:04:25 EDT 2015: Testing ch_is_url...PASS
Fri Aug 28 11:04:25 EDT 2015: Testing Testing ch_is_ip...PASS
Fri Aug 28 11:04:25 EDT 2015: Testing Testing ch_get_ip...PASS
Fri Aug 28 11:04:25 EDT 2015: Starting SimpleHTTPServer in /tmp/charm-
Fri Aug 28 11:04:25 EDT 2015: Looping wget until webserver responds...
Fri Aug 28 11:04:26 EDT 2015: Attempt 1 succeeded.
Fri Aug 28 11:04:26 EDT 2015: Creating temp data file
Fri Aug 28 11:04:26 EDT 2015: creating gzipped test data
Fri Aug 28 11:04:26 EDT 2015: Testing ch_get_file...PASS
Fri Aug 28 11:04:26 EDT 2015: Shutting down webserver...DONE
Fri Aug 28 11:04:26 EDT 2015: Printing server log
127.0.0.1 - - [28/Aug/2015 11:04:26] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:26] "GET /testdata.txt HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:26] "GET /testdata.txt HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:26] "GET /testdata.txt HTTP/1.1" 200 -
127.0.0.1 - - [28/Aug/2015 11:04:26] "GET /testdata.txt.gz HTTP/1.1" 200 -
127.0.0...
- 378. By Benjamin Saller: merge trunk
- 379. By Benjamin Saller: path fixes on integration tests
Benjamin Saller (bcsaller) wrote:
Should be fixed; these were very minor pathing issues in the tests, which now point to the tox virtualenv.
- 380. By Benjamin Saller: notest on develop
- 381. By Benjamin Saller: pass term env
- 382. By Benjamin Saller: move passenv
Tim Van Steenburgh (tvansteenburgh) wrote:
Tests pass after upgrading tox to 2.1.1 (1.6.0 failed), LGTM.
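For anyone who hits the same failure, a quick and purely illustrative check (not part of this branch) that the installed tox is new enough before running the suite:

# Illustrative only: tox 1.6.0 failed here, 2.1.1 worked.
import pkg_resources

pkg_resources.require("tox>=2.1.1")  # raises if tox is missing or too old
print(pkg_resources.get_distribution("tox").version)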
Preview Diff
1 | === modified file '.bzrignore' | |||
2 | --- .bzrignore 2014-01-14 03:23:17 +0000 | |||
3 | +++ .bzrignore 2015-08-31 19:32:56 +0000 | |||
4 | @@ -1,4 +1,4 @@ | |||
6 | 1 | tests/proof/results/* | 1 | tests_functional/proof/results/* |
7 | 2 | .coverage | 2 | .coverage |
8 | 3 | .noseids | 3 | .noseids |
9 | 4 | charm_tools.egg-info | 4 | charm_tools.egg-info |
10 | @@ -13,3 +13,5 @@ | |||
11 | 13 | dist | 13 | dist |
12 | 14 | tests/.ropeproject/ | 14 | tests/.ropeproject/ |
13 | 15 | charmtools/.ropeproject/ | 15 | charmtools/.ropeproject/ |
14 | 16 | .ropeproject/ | ||
15 | 17 | .tox/ | ||
16 | 16 | 18 | ||
17 | === modified file 'MANIFEST.in' | |||
18 | --- MANIFEST.in 2014-06-19 23:18:53 +0000 | |||
19 | +++ MANIFEST.in 2015-08-31 19:32:56 +0000 | |||
20 | @@ -1,3 +1,4 @@ | |||
21 | 1 | include *.py README* | 1 | include *.py README* |
22 | 2 | include doc/source/composer.md | ||
23 | 2 | recursive-include charmtools * | 3 | recursive-include charmtools * |
24 | 3 | recursive-exclude charmtools *.pyc | 4 | recursive-exclude charmtools *.pyc |
25 | 4 | 5 | ||
26 | === modified file 'Makefile' | |||
27 | --- Makefile 2014-06-20 19:46:59 +0000 | |||
28 | +++ Makefile 2015-08-31 19:32:56 +0000 | |||
29 | @@ -17,44 +17,23 @@ | |||
30 | 17 | confdir = $(DESTDIR)/etc | 17 | confdir = $(DESTDIR)/etc |
31 | 18 | INSTALL = install | 18 | INSTALL = install |
32 | 19 | 19 | ||
40 | 20 | # We use a "canary" file to tell us if the package has been installed in | 20 | develop: |
41 | 21 | # "develop" mode. | 21 | tox --develop --notest |
35 | 22 | DEVELOP_CANARY := lib/__develop_canary | ||
36 | 23 | develop: $(DEVELOP_CANARY) | ||
37 | 24 | $(DEVELOP_CANARY): | python-deps | ||
38 | 25 | bin/python setup.py develop | ||
39 | 26 | touch $(DEVELOP_CANARY) | ||
42 | 27 | 22 | ||
44 | 28 | build: deps develop bin/test | 23 | build: deps develop |
45 | 29 | 24 | ||
46 | 30 | dependencies: | 25 | dependencies: |
47 | 31 | bzr checkout lp:~juju-jitsu/charm-tools/dependencies | 26 | bzr checkout lp:~juju-jitsu/charm-tools/dependencies |
48 | 32 | 27 | ||
60 | 33 | # We use a "canary" file to tell us if the Python packages have been installed. | 28 | PYTHON_DEPS=build-essential bzr python-dev python-tox |
61 | 34 | PYTHON_PACKAGE_CANARY := lib/python2.7/site-packages/___canary | 29 | python-deps: scripts/packages.sh |
62 | 35 | python-deps: $(PYTHON_PACKAGE_CANARY) | 30 | $(if $(shell ./scripts/packages.sh $(PYTHON_DEPS)), \ |
63 | 36 | $(PYTHON_PACKAGE_CANARY): requirements.txt | dependencies | 31 | tox -r --notest) |
53 | 37 | sudo apt-get update | ||
54 | 38 | sudo apt-get install -y build-essential bzr python-dev \ | ||
55 | 39 | python-virtualenv | ||
56 | 40 | virtualenv . | ||
57 | 41 | bin/pip install --no-index --no-dependencies --find-links \ | ||
58 | 42 | file:///$(WD)/dependencies/python -r requirements.txt | ||
59 | 43 | touch $(PYTHON_PACKAGE_CANARY) | ||
64 | 44 | 32 | ||
65 | 45 | deps: python-deps | dependencies | 33 | deps: python-deps | dependencies |
66 | 46 | 34 | ||
78 | 47 | bin/nosetests: python-deps | 35 | test: |
79 | 48 | 36 | tox | |
69 | 49 | bin/test: | bin/nosetests | ||
70 | 50 | ln scripts/test bin/test | ||
71 | 51 | |||
72 | 52 | test: build bin/test | ||
73 | 53 | bin/test | ||
74 | 54 | |||
75 | 55 | lint: sources = setup.py charmtools | ||
76 | 56 | lint: build | ||
77 | 57 | @find $(sources) -name '*.py' -print0 | xargs -r0 bin/flake8 | ||
80 | 58 | 37 | ||
81 | 59 | tags: | 38 | tags: |
82 | 60 | ctags --tag-relative --python-kinds=-iv -Rf tags --sort=yes \ | 39 | ctags --tag-relative --python-kinds=-iv -Rf tags --sort=yes \ |
83 | @@ -93,12 +72,11 @@ | |||
84 | 93 | tests_functional/proof/test.sh | 72 | tests_functional/proof/test.sh |
85 | 94 | tests_functional/create/test.sh | 73 | tests_functional/create/test.sh |
86 | 95 | tests_functional/add/test.sh | 74 | tests_functional/add/test.sh |
93 | 96 | # PYTHONPATH=helpers/python python helpers/python/charmhelpers/tests/test_charmhelpers.py | 75 | |
94 | 97 | 76 | coverage: build | |
95 | 98 | coverage: build bin/test | 77 | tox |
96 | 99 | bin/test --with-coverage --cover-package=charmtools --cover-tests | 78 | |
97 | 100 | 79 | check: build integration test | |
92 | 101 | check: build integration test lint | ||
98 | 102 | 80 | ||
99 | 103 | define phony | 81 | define phony |
100 | 104 | build | 82 | build |
101 | @@ -106,7 +84,6 @@ | |||
102 | 106 | clean | 84 | clean |
103 | 107 | deps | 85 | deps |
104 | 108 | install | 86 | install |
105 | 109 | lint | ||
106 | 110 | tags | 87 | tags |
107 | 111 | test | 88 | test |
108 | 112 | endef | 89 | endef |
109 | 113 | 90 | ||
110 | === added directory 'charmtools/compose' | |||
111 | === added file 'charmtools/compose/__init__.py' | |||
112 | --- charmtools/compose/__init__.py 1970-01-01 00:00:00 +0000 | |||
113 | +++ charmtools/compose/__init__.py 2015-08-31 19:32:56 +0000 | |||
114 | @@ -0,0 +1,466 @@ | |||
115 | 1 | #!/usr/bin/env python | ||
116 | 2 | # -*- coding: utf-8 -*- | ||
117 | 3 | import argparse | ||
118 | 4 | import json | ||
119 | 5 | import logging | ||
120 | 6 | import os | ||
121 | 7 | import sys | ||
122 | 8 | |||
123 | 9 | import blessings | ||
124 | 10 | from collections import OrderedDict | ||
125 | 11 | from path import path | ||
126 | 12 | import yaml | ||
127 | 13 | from charmtools.compose import inspector | ||
128 | 14 | import charmtools.compose.tactics | ||
129 | 15 | from charmtools.compose.config import (ComposerConfig, DEFAULT_IGNORES) | ||
130 | 16 | from charmtools.compose.fetchers import (InterfaceFetcher, | ||
131 | 17 | LayerFetcher, | ||
132 | 18 | get_fetcher, | ||
133 | 19 | FetchError) | ||
134 | 20 | from charmtools import utils | ||
135 | 21 | |||
136 | 22 | log = logging.getLogger("composer") | ||
137 | 23 | |||
138 | 24 | |||
139 | 25 | class Configable(object): | ||
140 | 26 | CONFIG_FILE = None | ||
141 | 27 | |||
142 | 28 | def __init__(self): | ||
143 | 29 | self._config = ComposerConfig() | ||
144 | 30 | self.config_file = None | ||
145 | 31 | |||
146 | 32 | @property | ||
147 | 33 | def config(self): | ||
148 | 34 | if self._config.configured: | ||
149 | 35 | return self._config | ||
150 | 36 | if self.config_file and self.config_file.exists(): | ||
151 | 37 | self._config.configure(self.config_file) | ||
152 | 38 | return self._config | ||
153 | 39 | |||
154 | 40 | @property | ||
155 | 41 | def configured(self): | ||
156 | 42 | return bool(self.config is not None and self.config.configured) | ||
157 | 43 | |||
158 | 44 | |||
159 | 45 | class Fetched(Configable): | ||
160 | 46 | def __init__(self, url, target_repo, name=None): | ||
161 | 47 | super(Fetched, self).__init__() | ||
162 | 48 | self.url = url | ||
163 | 49 | self.target_repo = target_repo | ||
164 | 50 | self.directory = None | ||
165 | 51 | self._name = name | ||
166 | 52 | |||
167 | 53 | @property | ||
168 | 54 | def name(self): | ||
169 | 55 | if self._name: | ||
170 | 56 | return self._name | ||
171 | 57 | if self.url.startswith(self.NAMESPACE): | ||
172 | 58 | return self.url[len(self.NAMESPACE):] | ||
173 | 59 | return self.url | ||
174 | 60 | |||
175 | 61 | def __repr__(self): | ||
176 | 62 | return "<{} {}:{}>".format(self.__class__.__name__, | ||
177 | 63 | self.url, self.directory) | ||
178 | 64 | |||
179 | 65 | def __div__(self, other): | ||
180 | 66 | return self.directory / other | ||
181 | 67 | |||
182 | 68 | def fetch(self): | ||
183 | 69 | try: | ||
184 | 70 | fetcher = get_fetcher(self.url) | ||
185 | 71 | except FetchError: | ||
186 | 72 | # We might be passing a local dir path directly | ||
187 | 73 | # which fetchers don't currently support | ||
188 | 74 | self.directory = path(self.url) | ||
189 | 75 | else: | ||
190 | 76 | if hasattr(fetcher, "path") and fetcher.path.exists(): | ||
191 | 77 | self.directory = path(fetcher.path) | ||
192 | 78 | else: | ||
193 | 79 | if not self.target_repo.exists(): | ||
194 | 80 | self.target_repo.makedirs_p() | ||
195 | 81 | self.directory = path(fetcher.fetch(self.target_repo)) | ||
196 | 82 | |||
197 | 83 | if not self.directory.exists(): | ||
198 | 84 | raise OSError( | ||
199 | 85 | "Unable to locate {}. " | ||
200 | 86 | "Do you need to set {}?".format( | ||
201 | 87 | self.url, self.ENVIRON)) | ||
202 | 88 | |||
203 | 89 | self.config_file = self.directory / self.CONFIG_FILE | ||
204 | 90 | self._name = self.config.name | ||
205 | 91 | return self | ||
206 | 92 | |||
207 | 93 | |||
208 | 94 | class Interface(Fetched): | ||
209 | 95 | CONFIG_FILE = "interface.yaml" | ||
210 | 96 | NAMESPACE = "interface" | ||
211 | 97 | ENVIRON = "INTERFACE_PATH" | ||
212 | 98 | |||
213 | 99 | |||
214 | 100 | class Layer(Fetched): | ||
215 | 101 | CONFIG_FILE = "composer.yaml" | ||
216 | 102 | NAMESPACE = "layer" | ||
217 | 103 | ENVIRON = "COMPOSER_PATH" | ||
218 | 104 | |||
219 | 105 | |||
220 | 106 | class Composer(object): | ||
221 | 107 | """ | ||
222 | 108 | Handle the processing of overrides, implements the policy of ComposerConfig | ||
223 | 109 | """ | ||
224 | 110 | PHASES = ['lint', 'read', 'call', 'sign', 'build'] | ||
225 | 111 | |||
226 | 112 | def __init__(self): | ||
227 | 113 | self.config = ComposerConfig() | ||
228 | 114 | self.force = False | ||
229 | 115 | self._name = None | ||
230 | 116 | self._charm = None | ||
231 | 117 | |||
232 | 118 | @property | ||
233 | 119 | def charm(self): | ||
234 | 120 | return self._charm | ||
235 | 121 | |||
236 | 122 | @charm.setter | ||
237 | 123 | def charm(self, value): | ||
238 | 124 | self._charm = path(value) | ||
239 | 125 | |||
240 | 126 | @property | ||
241 | 127 | def name(self): | ||
242 | 128 | if self._name: | ||
243 | 129 | return self._name | ||
244 | 130 | |||
245 | 131 | # optionally extract name from the top layer | ||
246 | 132 | self._name = str(path(self.charm).abspath().name) | ||
247 | 133 | |||
248 | 134 | # however if the current layer has a metadata.yaml we can | ||
249 | 135 | # use its name | ||
250 | 136 | md = path(self.charm) / "metadata.yaml" | ||
251 | 137 | if md.exists(): | ||
252 | 138 | data = yaml.load(md.open()) | ||
253 | 139 | name = data.get("name") | ||
254 | 140 | if name: | ||
255 | 141 | self._name = name | ||
256 | 142 | return self._name | ||
257 | 143 | |||
258 | 144 | @name.setter | ||
259 | 145 | def name(self, value): | ||
260 | 146 | self._name = value | ||
261 | 147 | |||
262 | 148 | @property | ||
263 | 149 | def charm_name(self): | ||
264 | 150 | return "{}/{}".format(self.series, self.name) | ||
265 | 151 | |||
266 | 152 | def status(self): | ||
267 | 153 | result = {} | ||
268 | 154 | result.update(vars(self)) | ||
269 | 155 | for e in ["COMPOSER_PATH", "INTERFACE_PATH", "JUJU_REPOSITORY"]: | ||
270 | 156 | result[e] = os.environ.get(e) | ||
271 | 157 | return result | ||
272 | 158 | |||
273 | 159 | def create_repo(self): | ||
274 | 160 | # Generated output will go into this directory | ||
275 | 161 | base = path(self.output_dir) | ||
276 | 162 | self.repo = (base / self.series) | ||
277 | 163 | # And anything it includes from will be placed here | ||
278 | 164 | # outside the series | ||
279 | 165 | self.deps = (base / "deps" / self.series) | ||
280 | 166 | self.target_dir = (self.repo / self.name) | ||
281 | 167 | |||
282 | 168 | def find_or_create_repo(self, allow_create=True): | ||
283 | 169 | # see if output dir is already in a repo, we can use that directly | ||
284 | 170 | if self.output_dir == path(self.charm).normpath(): | ||
285 | 171 | # we've indicated in the cmdline that we are doing an inplace | ||
286 | 172 | # update | ||
287 | 173 | if self.output_dir.parent.basename() == self.series: | ||
288 | 174 | # we're already in a repo | ||
289 | 175 | self.repo = self.output_dir.parent.parent | ||
290 | 176 | self.deps = (self.repo / "deps" / self.series) | ||
291 | 177 | self.target_dir = self.output_dir | ||
292 | 178 | return | ||
293 | 179 | if allow_create: | ||
294 | 180 | self.create_repo() | ||
295 | 181 | else: | ||
296 | 182 | raise ValueError("%s doesn't seem valid", self.charm.directory) | ||
297 | 183 | |||
298 | 184 | def fetch(self): | ||
299 | 185 | layer = Layer(self.charm, self.deps).fetch() | ||
300 | 186 | if not layer.configured: | ||
301 | 187 | log.info("The top level layer expects a " | ||
302 | 188 | "valid composer.yaml file, " | ||
303 | 189 | "using defaults.") | ||
304 | 190 | # Manually create a layer object for the output | ||
305 | 191 | self.target = Layer(self.name, self.repo) | ||
306 | 192 | self.target.directory = self.target_dir | ||
307 | 193 | return self.fetch_deps(layer) | ||
308 | 194 | |||
309 | 195 | def fetch_deps(self, layer): | ||
310 | 196 | results = {"layers": [], "interfaces": []} | ||
311 | 197 | self.fetch_dep(layer, results) | ||
312 | 198 | # results should now be a bottom up list | ||
313 | 199 | # of deps. Using the in order results traversal | ||
314 | 200 | # we can build out our plan for each file in the | ||
315 | 201 | # output layer | ||
316 | 202 | results["layers"].append(layer) | ||
317 | 203 | self._layers = results["layers"] | ||
318 | 204 | self._interfaces = results["interfaces"] | ||
319 | 205 | return results | ||
320 | 206 | |||
321 | 207 | @property | ||
322 | 208 | def layers(self): | ||
323 | 209 | layers = [] | ||
324 | 210 | for i in self._layers: | ||
325 | 211 | layers.append(i.url) | ||
326 | 212 | for i in self._interfaces: | ||
327 | 213 | layers.append(i.url) | ||
328 | 214 | layers.append("composer") | ||
329 | 215 | return layers | ||
330 | 216 | |||
331 | 217 | def fetch_dep(self, layer, results): | ||
332 | 218 | # Recursively fetch and scan layers | ||
333 | 219 | # This returns a plan for each file in the result | ||
334 | 220 | baselayers = layer.config.get('includes', []) | ||
335 | 221 | if not baselayers: | ||
336 | 222 | # no deps, this is possible for any base | ||
337 | 223 | # but questionable for the target | ||
338 | 224 | return | ||
339 | 225 | |||
340 | 226 | if isinstance(baselayers, str): | ||
341 | 227 | baselayers = [baselayers] | ||
342 | 228 | |||
343 | 229 | for base in baselayers: | ||
344 | 230 | if base.startswith("interface:"): | ||
345 | 231 | iface = Interface(base, self.deps).fetch() | ||
346 | 232 | results["interfaces"].append(iface) | ||
347 | 233 | else: | ||
348 | 234 | base_layer = Layer(base, self.deps).fetch() | ||
349 | 235 | self.fetch_dep(base_layer, results) | ||
350 | 236 | results["layers"].append(base_layer) | ||
351 | 237 | |||
352 | 238 | def build_tactics(self, entry, current, config, output_files): | ||
353 | 239 | # Delegate to the config object, it's rules | ||
354 | 240 | # will produce a tactic | ||
355 | 241 | relname = entry.relpath(current.directory) | ||
356 | 242 | current = current.config.tactic(entry, current, self.target, config) | ||
357 | 243 | existing = output_files.get(relname) | ||
358 | 244 | if existing is not None: | ||
359 | 245 | tactic = current.combine(existing) | ||
360 | 246 | else: | ||
361 | 247 | tactic = current | ||
362 | 248 | output_files[relname] = tactic | ||
363 | 249 | |||
364 | 250 | def plan_layers(self, layers, output_files): | ||
365 | 251 | config = ComposerConfig() | ||
366 | 252 | config = config.add_config( | ||
367 | 253 | layers["layers"][0] / ComposerConfig.DEFAULT_FILE, True) | ||
368 | 254 | |||
369 | 255 | layers["layers"][-1].url = self.name | ||
370 | 256 | |||
371 | 257 | for i, layer in enumerate(layers["layers"]): | ||
372 | 258 | log.info("Processing layer: %s", layer.url) | ||
373 | 259 | if i + 1 < len(layers["layers"]): | ||
374 | 260 | next_layer = layers["layers"][i + 1] | ||
375 | 261 | config = config.add_config( | ||
376 | 262 | next_layer / ComposerConfig.DEFAULT_FILE, True) | ||
377 | 263 | list(e for e in utils.walk(layer.directory, | ||
378 | 264 | self.build_tactics, | ||
379 | 265 | current=layer, | ||
380 | 266 | config=config, | ||
381 | 267 | output_files=output_files)) | ||
382 | 268 | plan = [t for t in output_files.values() if t] | ||
383 | 269 | return plan | ||
384 | 270 | |||
385 | 271 | def plan_interfaces(self, layers, output_files, plan): | ||
386 | 272 | # Interface includes don't directly map to output files | ||
387 | 273 | # as they are computed in combination with the metadata.yaml | ||
388 | 274 | charm_meta = output_files.get("metadata.yaml") | ||
389 | 275 | if charm_meta: | ||
390 | 276 | meta = charm_meta() | ||
391 | 277 | if not meta: | ||
392 | 278 | return | ||
393 | 279 | target_config = layers["layers"][-1].config | ||
394 | 280 | specs = [] | ||
395 | 281 | used_interfaces = set() | ||
396 | 282 | for kind in ("provides", "requires", "peer"): | ||
397 | 283 | for k, v in meta.get(kind, {}).items(): | ||
398 | 284 | # ex: ["provides", "db", "mysql"] | ||
399 | 285 | specs.append([kind, k, v["interface"]]) | ||
400 | 286 | used_interfaces.add(v["interface"]) | ||
401 | 287 | |||
402 | 288 | for iface in layers["interfaces"]: | ||
403 | 289 | if iface.name not in used_interfaces: | ||
404 | 290 | # we shouldn't include something the charm doesn't use | ||
405 | 291 | log.warn("composer.yaml includes {} which isn't " | ||
406 | 292 | "used in metadata.yaml".format( | ||
407 | 293 | iface.name)) | ||
408 | 294 | continue | ||
409 | 295 | for kind, relation_name, interface_name in specs: | ||
410 | 296 | if interface_name != iface.name: | ||
411 | 297 | continue | ||
412 | 298 | # COPY phase | ||
413 | 299 | plan.append( | ||
414 | 300 | charmtools.compose.tactics.InterfaceCopy( | ||
415 | 301 | iface, relation_name, | ||
416 | 302 | self.target, target_config) | ||
417 | 303 | ) | ||
418 | 304 | # Link Phase | ||
419 | 305 | plan.append( | ||
420 | 306 | charmtools.compose.tactics.InterfaceBind( | ||
421 | 307 | iface, relation_name, kind, | ||
422 | 308 | self.target, target_config)) | ||
423 | 309 | elif not charm_meta and layers["interfaces"]: | ||
424 | 310 | raise ValueError( | ||
425 | 311 | "Includes interfaces but no metadata.yaml to bind them") | ||
426 | 312 | |||
427 | 313 | def formulate_plan(self, layers): | ||
428 | 314 | """Build out a plan for each file in the various composed | ||
429 | 315 | layers, taking into account config at each layer""" | ||
430 | 316 | output_files = OrderedDict() | ||
431 | 317 | self.plan = self.plan_layers(layers, output_files) | ||
432 | 318 | self.plan_interfaces(layers, output_files, self.plan) | ||
433 | 319 | return self.plan | ||
434 | 320 | |||
435 | 321 | def exec_plan(self, plan=None, layers=None): | ||
436 | 322 | signatures = {} | ||
437 | 323 | cont = True | ||
438 | 324 | for phase in self.PHASES: | ||
439 | 325 | for tactic in plan: | ||
440 | 326 | if phase == "lint": | ||
441 | 327 | cont &= tactic.lint() | ||
442 | 328 | if cont is False and self.force is not True: | ||
443 | 329 | break | ||
444 | 330 | elif phase == "read": | ||
445 | 331 | # We use a read (into memory phase to make layer comps | ||
446 | 332 | # simpler) | ||
447 | 333 | tactic.read() | ||
448 | 334 | elif phase == "call": | ||
449 | 335 | tactic() | ||
450 | 336 | elif phase == "sign": | ||
451 | 337 | sig = tactic.sign() | ||
452 | 338 | if sig: | ||
453 | 339 | signatures.update(sig) | ||
454 | 340 | elif phase == "build": | ||
455 | 341 | tactic.build() | ||
456 | 342 | # write out the sigs | ||
457 | 343 | if "sign" in self.PHASES: | ||
458 | 344 | self.write_signatures(signatures, layers) | ||
459 | 345 | |||
460 | 346 | def write_signatures(self, signatures, layers): | ||
461 | 347 | sigs = self.target / ".composer.manifest" | ||
462 | 348 | signatures['.composer.manifest'] = ["composer", 'dynamic', 'unchecked'] | ||
463 | 349 | sigs.write_text(json.dumps(dict( | ||
464 | 350 | signatures=signatures, | ||
465 | 351 | layers=layers, | ||
466 | 352 | ), indent=2)) | ||
467 | 353 | |||
468 | 354 | def generate(self): | ||
469 | 355 | layers = self.fetch() | ||
470 | 356 | self.formulate_plan(layers) | ||
471 | 357 | self.exec_plan(self.plan, self.layers) | ||
472 | 358 | |||
473 | 359 | def validate(self): | ||
474 | 360 | p = self.target_dir / ".composer.manifest" | ||
475 | 361 | if not p.exists(): | ||
476 | 362 | return [], [], [] | ||
477 | 363 | ignorer = utils.ignore_matcher(DEFAULT_IGNORES) | ||
478 | 364 | a, c, d = utils.delta_signatures(p, ignorer) | ||
479 | 365 | |||
480 | 366 | for f in a: | ||
481 | 367 | log.warn( | ||
482 | 368 | "Added unexpected file, should be in a base layer: %s", f) | ||
483 | 369 | for f in c: | ||
484 | 370 | log.warn( | ||
485 | 371 | "Changed file owned by another layer: %s", f) | ||
486 | 372 | for f in d: | ||
487 | 373 | log.warn( | ||
488 | 374 | "Deleted a file owned by another layer: %s", f) | ||
489 | 375 | if a or c or d: | ||
490 | 376 | if self.force is True: | ||
491 | 377 | log.info( | ||
492 | 378 | "Continuing with known changes to target layer. " | ||
493 | 379 | "Changes will be overwritten") | ||
494 | 380 | else: | ||
495 | 381 | raise ValueError( | ||
496 | 382 | "Unable to continue due to unexpected modifications") | ||
497 | 383 | return a, c, d | ||
498 | 384 | |||
499 | 385 | def __call__(self): | ||
500 | 386 | self.find_or_create_repo() | ||
501 | 387 | |||
502 | 388 | log.debug(json.dumps( | ||
503 | 389 | self.status(), indent=2, sort_keys=True, default=str)) | ||
504 | 390 | self.validate() | ||
505 | 391 | self.generate() | ||
506 | 392 | |||
507 | 393 | def inspect(self): | ||
508 | 394 | self.charm = path(self.charm).abspath() | ||
509 | 395 | inspector.inspect(self.charm) | ||
510 | 396 | |||
511 | 397 | def normalize_outputdir(self): | ||
512 | 398 | od = path(self.charm).normpath() | ||
513 | 399 | repo = os.environ.get('JUJU_REPOSITORY') | ||
514 | 400 | if repo: | ||
515 | 401 | repo = path(repo) | ||
516 | 402 | if repo.exists(): | ||
517 | 403 | od = repo | ||
518 | 404 | elif ":" in od: | ||
519 | 405 | od = od.basename | ||
520 | 406 | log.info("Composing into {}".format(od)) | ||
521 | 407 | self.output_dir = od | ||
522 | 408 | |||
523 | 409 | |||
524 | 410 | def configLogging(composer): | ||
525 | 411 | global log | ||
526 | 412 | clifmt = utils.ColoredFormatter( | ||
527 | 413 | blessings.Terminal(), | ||
528 | 414 | '%(name)s: %(message)s') | ||
529 | 415 | root_logger = logging.getLogger() | ||
530 | 416 | clihandler = logging.StreamHandler(sys.stdout) | ||
531 | 417 | clihandler.setFormatter(clifmt) | ||
532 | 418 | if isinstance(composer.log_level, str): | ||
533 | 419 | composer.log_level = composer.log_level.upper() | ||
534 | 420 | root_logger.setLevel(composer.log_level) | ||
535 | 421 | log.setLevel(composer.log_level) | ||
536 | 422 | root_logger.addHandler(clihandler) | ||
537 | 423 | requests_logger = logging.getLogger("requests") | ||
538 | 424 | requests_logger.setLevel(logging.WARNING) | ||
539 | 425 | |||
540 | 426 | |||
541 | 427 | def inspect(args=None): | ||
542 | 428 | composer = Composer() | ||
543 | 429 | parser = argparse.ArgumentParser() | ||
544 | 430 | parser.add_argument('-l', '--log-level', default=logging.INFO) | ||
545 | 431 | parser.add_argument('charm', nargs="?", default=".", type=path) | ||
546 | 432 | # Namespace will set the options as attrs of composer | ||
547 | 433 | parser.parse_args(args, namespace=composer) | ||
548 | 434 | configLogging(composer) | ||
549 | 435 | composer.inspect() | ||
550 | 436 | |||
551 | 437 | |||
552 | 438 | def main(args=None): | ||
553 | 439 | composer = Composer() | ||
554 | 440 | parser = argparse.ArgumentParser( | ||
555 | 441 | description="Compose layers into a charm", | ||
556 | 442 | formatter_class=argparse.RawDescriptionHelpFormatter,) | ||
557 | 443 | parser.add_argument('-l', '--log-level', default=logging.INFO) | ||
558 | 444 | parser.add_argument('-f', '--force', action="store_true") | ||
559 | 445 | parser.add_argument('-o', '--output-dir', type=path) | ||
560 | 446 | parser.add_argument('-s', '--series', default="trusty") | ||
561 | 447 | parser.add_argument('--interface-service', | ||
562 | 448 | default="http://interfaces.juju.solutions") | ||
563 | 449 | parser.add_argument('-n', '--name', | ||
564 | 450 | help="Generate a charm of 'name' from 'charm'") | ||
565 | 451 | parser.add_argument('charm', nargs="?", default=".", type=path) | ||
566 | 452 | # Namespace will set the options as attrs of composer | ||
567 | 453 | parser.parse_args(args, namespace=composer) | ||
568 | 454 | # Monkey patch in the domain for the interface webservice | ||
569 | 455 | InterfaceFetcher.INTERFACE_DOMAIN = composer.interface_service | ||
570 | 456 | LayerFetcher.INTERFACE_DOMAIN = composer.interface_service | ||
571 | 457 | configLogging(composer) | ||
572 | 458 | |||
573 | 459 | if not composer.output_dir: | ||
574 | 460 | composer.normalize_outputdir() | ||
575 | 461 | |||
576 | 462 | composer() | ||
577 | 463 | |||
578 | 464 | |||
579 | 465 | if __name__ == '__main__': | ||
580 | 466 | main() | ||
581 | 0 | 467 | ||
582 | === added file 'charmtools/compose/config.py' | |||
583 | --- charmtools/compose/config.py 1970-01-01 00:00:00 +0000 | |||
584 | +++ charmtools/compose/config.py 2015-08-31 19:32:56 +0000 | |||
585 | @@ -0,0 +1,113 @@ | |||
586 | 1 | from .tactics import DEFAULT_TACTICS, load_tactic | ||
587 | 2 | |||
588 | 3 | |||
589 | 4 | import pathspec | ||
590 | 5 | from ruamel import yaml | ||
591 | 6 | import logging | ||
592 | 7 | from path import path | ||
593 | 8 | from otherstuf import chainstuf | ||
594 | 9 | |||
595 | 10 | DEFAULT_IGNORES = [ | ||
596 | 11 | ".bzr/", | ||
597 | 12 | ".git/", | ||
598 | 13 | "**/.ropeproject/", | ||
599 | 14 | "*.pyc", | ||
600 | 15 | "*~", | ||
601 | 16 | ".tox/", | ||
602 | 17 | "build/", | ||
603 | 18 | ] | ||
604 | 19 | |||
605 | 20 | |||
606 | 21 | class ComposerConfig(chainstuf): | ||
607 | 22 | """Defaults for controlling the generator, each layer in | ||
608 | 23 | the inclusion graph can provide values, including things | ||
609 | 24 | like overrides, or warnings if things are overridden that | ||
610 | 25 | shouldn't be. | ||
611 | 26 | """ | ||
612 | 27 | DEFAULT_FILE = "composer.yaml" | ||
613 | 28 | |||
614 | 29 | def __init__(self, *args, **kwargs): | ||
615 | 30 | super(ComposerConfig, self).__init__(*args, **kwargs) | ||
616 | 31 | self['_tactics'] = [] | ||
617 | 32 | self.configured = False | ||
618 | 33 | |||
619 | 34 | def __getattr__(self, key): | ||
620 | 35 | return self[key] | ||
621 | 36 | |||
622 | 37 | def rget(self, key): | ||
623 | 38 | """Combine all the results from all the layers into a single iter""" | ||
624 | 39 | result = [] | ||
625 | 40 | for m in self.maps: | ||
626 | 41 | r = m.get(key) | ||
627 | 42 | if r: | ||
628 | 43 | if isinstance(r, (list, tuple)): | ||
629 | 44 | result.extend(r) | ||
630 | 45 | else: | ||
631 | 46 | result.append(r) | ||
632 | 47 | return result | ||
633 | 48 | |||
634 | 49 | def configure(self, config_file, allow_missing=False): | ||
635 | 50 | config_file = path(config_file) | ||
636 | 51 | data = None | ||
637 | 52 | if not config_file.exists() and not allow_missing: | ||
638 | 53 | raise OSError("Missing Config File {}".format(config_file)) | ||
639 | 54 | try: | ||
640 | 55 | if config_file.exists(): | ||
641 | 56 | data = yaml.load(config_file.open()) | ||
642 | 57 | self.configured = True | ||
643 | 58 | except yaml.parser.ParserError: | ||
644 | 59 | logging.critical("Malformed Config file: {}".format(config_file)) | ||
645 | 60 | raise | ||
646 | 61 | if data: | ||
647 | 62 | self.update(data) | ||
648 | 63 | # look at any possible imports and use them to build tactics | ||
649 | 64 | tactics = self.get('tactics') | ||
650 | 65 | basedir = config_file.dirname() | ||
651 | 66 | if tactics: | ||
652 | 67 | for name in tactics: | ||
653 | 68 | tactic = load_tactic(name, basedir) | ||
654 | 69 | self._tactics.append(tactic) | ||
655 | 70 | return self | ||
656 | 71 | |||
657 | 72 | @classmethod | ||
658 | 73 | def from_config(cls, config_file, allow_missing=False): | ||
659 | 74 | c = cls() | ||
660 | 75 | c.configure(config_file, allow_missing) | ||
661 | 76 | return c | ||
662 | 77 | |||
663 | 78 | def add_config(self, config_file, allow_missing=False): | ||
664 | 79 | c = self.new_child() | ||
665 | 80 | c.configure(config_file, allow_missing) | ||
666 | 81 | return c | ||
667 | 82 | |||
668 | 83 | @property | ||
669 | 84 | def name(self): | ||
670 | 85 | return self.get('name') | ||
671 | 86 | |||
672 | 87 | @property | ||
673 | 88 | def ignores(self): | ||
674 | 89 | return self.rget('ignore') + DEFAULT_IGNORES | ||
675 | 90 | |||
676 | 91 | def tactics(self): | ||
677 | 92 | # XXX: combine from config layer | ||
678 | 93 | return self.rget('_tactics') + DEFAULT_TACTICS | ||
679 | 94 | |||
680 | 95 | def tactic(self, entity, current, target, next_config): | ||
681 | 96 | # Produce a tactic for the entity in question | ||
682 | 97 | # These will be accumulate through the layers | ||
683 | 98 | # and executed later | ||
684 | 99 | bd = current.directory | ||
685 | 100 | # Ignore handling | ||
686 | 101 | if next_config: | ||
687 | 102 | spec = pathspec.PathSpec.from_lines(pathspec.GitIgnorePattern, | ||
688 | 103 | next_config.ignores) | ||
689 | 104 | p = entity.relpath(bd) | ||
690 | 105 | matches = spec.match_files((p,)) | ||
691 | 106 | if p in matches: | ||
692 | 107 | return None | ||
693 | 108 | |||
694 | 109 | for tactic in self.tactics(): | ||
695 | 110 | if tactic.trigger(entity.relpath(bd)): | ||
696 | 111 | return tactic(target=target, entity=entity, | ||
697 | 112 | current=current, config=next_config) | ||
698 | 113 | return None | ||
699 | 0 | 114 | ||
700 | === added file 'charmtools/compose/diff_match_patch.py' | |||
701 | --- charmtools/compose/diff_match_patch.py 1970-01-01 00:00:00 +0000 | |||
702 | +++ charmtools/compose/diff_match_patch.py 2015-08-31 19:32:56 +0000 | |||
703 | @@ -0,0 +1,1919 @@ | |||
704 | 1 | #!/usr/bin/python2.4 | ||
705 | 2 | |||
706 | 3 | from __future__ import division | ||
707 | 4 | |||
708 | 5 | """Diff Match and Patch | ||
709 | 6 | |||
710 | 7 | Copyright 2006 Google Inc. | ||
711 | 8 | http://code.google.com/p/google-diff-match-patch/ | ||
712 | 9 | |||
713 | 10 | Licensed under the Apache License, Version 2.0 (the "License"); | ||
714 | 11 | you may not use this file except in compliance with the License. | ||
715 | 12 | You may obtain a copy of the License at | ||
716 | 13 | |||
717 | 14 | http://www.apache.org/licenses/LICENSE-2.0 | ||
718 | 15 | |||
719 | 16 | Unless required by applicable law or agreed to in writing, software | ||
720 | 17 | distributed under the License is distributed on an "AS IS" BASIS, | ||
721 | 18 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
722 | 19 | See the License for the specific language governing permissions and | ||
723 | 20 | limitations under the License. | ||
724 | 21 | """ | ||
725 | 22 | |||
726 | 23 | """Functions for diff, match and patch. | ||
727 | 24 | |||
728 | 25 | Computes the difference between two texts to create a patch. | ||
729 | 26 | Applies the patch onto another text, allowing for errors. | ||
730 | 27 | """ | ||
731 | 28 | |||
732 | 29 | __author__ = 'fraser@google.com (Neil Fraser)' | ||
733 | 30 | |||
734 | 31 | import math | ||
735 | 32 | import re | ||
736 | 33 | import sys | ||
737 | 34 | import time | ||
738 | 35 | import urllib | ||
739 | 36 | |||
740 | 37 | class diff_match_patch: | ||
741 | 38 | """Class containing the diff, match and patch methods. | ||
742 | 39 | |||
743 | 40 | Also contains the behaviour settings. | ||
744 | 41 | """ | ||
745 | 42 | |||
746 | 43 | def __init__(self): | ||
747 | 44 | """Inits a diff_match_patch object with default settings. | ||
748 | 45 | Redefine these in your program to override the defaults. | ||
749 | 46 | """ | ||
750 | 47 | |||
751 | 48 | # Number of seconds to map a diff before giving up (0 for infinity). | ||
752 | 49 | self.Diff_Timeout = 1.0 | ||
753 | 50 | # Cost of an empty edit operation in terms of edit characters. | ||
754 | 51 | self.Diff_EditCost = 4 | ||
755 | 52 | # At what point is no match declared (0.0 = perfection, 1.0 = very loose). | ||
756 | 53 | self.Match_Threshold = 0.5 | ||
757 | 54 | # How far to search for a match (0 = exact location, 1000+ = broad match). | ||
758 | 55 | # A match this many characters away from the expected location will add | ||
759 | 56 | # 1.0 to the score (0.0 is a perfect match). | ||
760 | 57 | self.Match_Distance = 1000 | ||
761 | 58 | # When deleting a large block of text (over ~64 characters), how close do | ||
762 | 59 | # the contents have to be to match the expected contents. (0.0 = perfection, | ||
763 | 60 | # 1.0 = very loose). Note that Match_Threshold controls how closely the | ||
764 | 61 | # end points of a delete need to match. | ||
765 | 62 | self.Patch_DeleteThreshold = 0.5 | ||
766 | 63 | # Chunk size for context length. | ||
767 | 64 | self.Patch_Margin = 4 | ||
768 | 65 | |||
769 | 66 | # The number of bits in an int. | ||
770 | 67 | # Python has no maximum, thus to disable patch splitting set to 0. | ||
771 | 68 | # However to avoid long patches in certain pathological cases, use 32. | ||
772 | 69 | # Multiple short patches (using native ints) are much faster than long ones. | ||
773 | 70 | self.Match_MaxBits = 32 | ||
774 | 71 | |||
775 | 72 | # DIFF FUNCTIONS | ||
776 | 73 | |||
777 | 74 | # The data structure representing a diff is an array of tuples: | ||
778 | 75 | # [(DIFF_DELETE, "Hello"), (DIFF_INSERT, "Goodbye"), (DIFF_EQUAL, " world.")] | ||
779 | 76 | # which means: delete "Hello", add "Goodbye" and keep " world." | ||
780 | 77 | DIFF_DELETE = -1 | ||
781 | 78 | DIFF_INSERT = 1 | ||
782 | 79 | DIFF_EQUAL = 0 | ||
783 | 80 | |||
784 | 81 | def diff_main(self, text1, text2, checklines=True, deadline=None): | ||
785 | 82 | """Find the differences between two texts. Simplifies the problem by | ||
786 | 83 | stripping any common prefix or suffix off the texts before diffing. | ||
787 | 84 | |||
788 | 85 | Args: | ||
789 | 86 | text1: Old string to be diffed. | ||
790 | 87 | text2: New string to be diffed. | ||
791 | 88 | checklines: Optional speedup flag. If present and false, then don't run | ||
792 | 89 | a line-level diff first to identify the changed areas. | ||
793 | 90 | Defaults to true, which does a faster, slightly less optimal diff. | ||
794 | 91 | deadline: Optional time when the diff should be complete by. Used | ||
795 | 92 | internally for recursive calls. Users should set DiffTimeout instead. | ||
796 | 93 | |||
797 | 94 | Returns: | ||
798 | 95 | Array of changes. | ||
799 | 96 | """ | ||
800 | 97 | # Set a deadline by which time the diff must be complete. | ||
801 | 98 | if deadline == None: | ||
802 | 99 | # Unlike in most languages, Python counts time in seconds. | ||
803 | 100 | if self.Diff_Timeout <= 0: | ||
804 | 101 | deadline = sys.maxint | ||
805 | 102 | else: | ||
806 | 103 | deadline = time.time() + self.Diff_Timeout | ||
807 | 104 | |||
808 | 105 | # Check for null inputs. | ||
809 | 106 | if text1 == None or text2 == None: | ||
810 | 107 | raise ValueError("Null inputs. (diff_main)") | ||
811 | 108 | |||
812 | 109 | # Check for equality (speedup). | ||
813 | 110 | if text1 == text2: | ||
814 | 111 | if text1: | ||
815 | 112 | return [(self.DIFF_EQUAL, text1)] | ||
816 | 113 | return [] | ||
817 | 114 | |||
818 | 115 | # Trim off common prefix (speedup). | ||
819 | 116 | commonlength = self.diff_commonPrefix(text1, text2) | ||
820 | 117 | commonprefix = text1[:commonlength] | ||
821 | 118 | text1 = text1[commonlength:] | ||
822 | 119 | text2 = text2[commonlength:] | ||
823 | 120 | |||
824 | 121 | # Trim off common suffix (speedup). | ||
825 | 122 | commonlength = self.diff_commonSuffix(text1, text2) | ||
826 | 123 | if commonlength == 0: | ||
827 | 124 | commonsuffix = '' | ||
828 | 125 | else: | ||
829 | 126 | commonsuffix = text1[-commonlength:] | ||
830 | 127 | text1 = text1[:-commonlength] | ||
831 | 128 | text2 = text2[:-commonlength] | ||
832 | 129 | |||
833 | 130 | # Compute the diff on the middle block. | ||
834 | 131 | diffs = self.diff_compute(text1, text2, checklines, deadline) | ||
835 | 132 | |||
836 | 133 | # Restore the prefix and suffix. | ||
837 | 134 | if commonprefix: | ||
838 | 135 | diffs[:0] = [(self.DIFF_EQUAL, commonprefix)] | ||
839 | 136 | if commonsuffix: | ||
840 | 137 | diffs.append((self.DIFF_EQUAL, commonsuffix)) | ||
841 | 138 | self.diff_cleanupMerge(diffs) | ||
842 | 139 | return diffs | ||
843 | 140 | |||
844 | 141 | def diff_compute(self, text1, text2, checklines, deadline): | ||
845 | 142 | """Find the differences between two texts. Assumes that the texts do not | ||
846 | 143 | have any common prefix or suffix. | ||
847 | 144 | |||
848 | 145 | Args: | ||
849 | 146 | text1: Old string to be diffed. | ||
850 | 147 | text2: New string to be diffed. | ||
851 | 148 | checklines: Speedup flag. If false, then don't run a line-level diff | ||
852 | 149 | first to identify the changed areas. | ||
853 | 150 | If true, then run a faster, slightly less optimal diff. | ||
854 | 151 | deadline: Time when the diff should be complete by. | ||
855 | 152 | |||
856 | 153 | Returns: | ||
857 | 154 | Array of changes. | ||
858 | 155 | """ | ||
859 | 156 | if not text1: | ||
860 | 157 | # Just add some text (speedup). | ||
861 | 158 | return [(self.DIFF_INSERT, text2)] | ||
862 | 159 | |||
863 | 160 | if not text2: | ||
864 | 161 | # Just delete some text (speedup). | ||
865 | 162 | return [(self.DIFF_DELETE, text1)] | ||
866 | 163 | |||
867 | 164 | if len(text1) > len(text2): | ||
868 | 165 | (longtext, shorttext) = (text1, text2) | ||
869 | 166 | else: | ||
870 | 167 | (shorttext, longtext) = (text1, text2) | ||
871 | 168 | i = longtext.find(shorttext) | ||
872 | 169 | if i != -1: | ||
873 | 170 | # Shorter text is inside the longer text (speedup). | ||
874 | 171 | diffs = [(self.DIFF_INSERT, longtext[:i]), (self.DIFF_EQUAL, shorttext), | ||
875 | 172 | (self.DIFF_INSERT, longtext[i + len(shorttext):])] | ||
876 | 173 | # Swap insertions for deletions if diff is reversed. | ||
877 | 174 | if len(text1) > len(text2): | ||
878 | 175 | diffs[0] = (self.DIFF_DELETE, diffs[0][1]) | ||
879 | 176 | diffs[2] = (self.DIFF_DELETE, diffs[2][1]) | ||
880 | 177 | return diffs | ||
881 | 178 | |||
882 | 179 | if len(shorttext) == 1: | ||
883 | 180 | # Single character string. | ||
884 | 181 | # After the previous speedup, the character can't be an equality. | ||
885 | 182 | return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)] | ||
886 | 183 | |||
887 | 184 | # Check to see if the problem can be split in two. | ||
888 | 185 | hm = self.diff_halfMatch(text1, text2) | ||
889 | 186 | if hm: | ||
890 | 187 | # A half-match was found, sort out the return data. | ||
891 | 188 | (text1_a, text1_b, text2_a, text2_b, mid_common) = hm | ||
892 | 189 | # Send both pairs off for separate processing. | ||
893 | 190 | diffs_a = self.diff_main(text1_a, text2_a, checklines, deadline) | ||
894 | 191 | diffs_b = self.diff_main(text1_b, text2_b, checklines, deadline) | ||
895 | 192 | # Merge the results. | ||
896 | 193 | return diffs_a + [(self.DIFF_EQUAL, mid_common)] + diffs_b | ||
897 | 194 | |||
898 | 195 | if checklines and len(text1) > 100 and len(text2) > 100: | ||
899 | 196 | return self.diff_lineMode(text1, text2, deadline) | ||
900 | 197 | |||
901 | 198 | return self.diff_bisect(text1, text2, deadline) | ||
902 | 199 | |||
903 | 200 | def diff_lineMode(self, text1, text2, deadline): | ||
904 | 201 | """Do a quick line-level diff on both strings, then rediff the parts for | ||
905 | 202 | greater accuracy. | ||
906 | 203 | This speedup can produce non-minimal diffs. | ||
907 | 204 | |||
908 | 205 | Args: | ||
909 | 206 | text1: Old string to be diffed. | ||
910 | 207 | text2: New string to be diffed. | ||
911 | 208 | deadline: Time when the diff should be complete by. | ||
912 | 209 | |||
913 | 210 | Returns: | ||
914 | 211 | Array of changes. | ||
915 | 212 | """ | ||
916 | 213 | |||
917 | 214 | # Scan the text on a line-by-line basis first. | ||
918 | 215 | (text1, text2, linearray) = self.diff_linesToChars(text1, text2) | ||
919 | 216 | |||
920 | 217 | diffs = self.diff_main(text1, text2, False, deadline) | ||
921 | 218 | |||
922 | 219 | # Convert the diff back to original text. | ||
923 | 220 | self.diff_charsToLines(diffs, linearray) | ||
924 | 221 | # Eliminate freak matches (e.g. blank lines) | ||
925 | 222 | self.diff_cleanupSemantic(diffs) | ||
926 | 223 | |||
927 | 224 | # Rediff any replacement blocks, this time character-by-character. | ||
928 | 225 | # Add a dummy entry at the end. | ||
929 | 226 | diffs.append((self.DIFF_EQUAL, '')) | ||
930 | 227 | pointer = 0 | ||
931 | 228 | count_delete = 0 | ||
932 | 229 | count_insert = 0 | ||
933 | 230 | text_delete = '' | ||
934 | 231 | text_insert = '' | ||
935 | 232 | while pointer < len(diffs): | ||
936 | 233 | if diffs[pointer][0] == self.DIFF_INSERT: | ||
937 | 234 | count_insert += 1 | ||
938 | 235 | text_insert += diffs[pointer][1] | ||
939 | 236 | elif diffs[pointer][0] == self.DIFF_DELETE: | ||
940 | 237 | count_delete += 1 | ||
941 | 238 | text_delete += diffs[pointer][1] | ||
942 | 239 | elif diffs[pointer][0] == self.DIFF_EQUAL: | ||
943 | 240 | # Upon reaching an equality, check for prior redundancies. | ||
944 | 241 | if count_delete >= 1 and count_insert >= 1: | ||
945 | 242 | # Delete the offending records and add the merged ones. | ||
946 | 243 | a = self.diff_main(text_delete, text_insert, False, deadline) | ||
947 | 244 | diffs[pointer - count_delete - count_insert : pointer] = a | ||
948 | 245 | pointer = pointer - count_delete - count_insert + len(a) | ||
949 | 246 | count_insert = 0 | ||
950 | 247 | count_delete = 0 | ||
951 | 248 | text_delete = '' | ||
952 | 249 | text_insert = '' | ||
953 | 250 | |||
954 | 251 | pointer += 1 | ||
955 | 252 | |||
956 | 253 | diffs.pop() # Remove the dummy entry at the end. | ||
957 | 254 | |||
958 | 255 | return diffs | ||
959 | 256 | |||
960 | 257 | def diff_bisect(self, text1, text2, deadline): | ||
961 | 258 | """Find the 'middle snake' of a diff, split the problem in two | ||
962 | 259 | and return the recursively constructed diff. | ||
963 | 260 | See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations. | ||
964 | 261 | |||
965 | 262 | Args: | ||
966 | 263 | text1: Old string to be diffed. | ||
967 | 264 | text2: New string to be diffed. | ||
968 | 265 | deadline: Time at which to bail if not yet complete. | ||
969 | 266 | |||
970 | 267 | Returns: | ||
971 | 268 | Array of diff tuples. | ||
972 | 269 | """ | ||
973 | 270 | |||
974 | 271 | # Cache the text lengths to prevent multiple calls. | ||
975 | 272 | text1_length = len(text1) | ||
976 | 273 | text2_length = len(text2) | ||
977 | 274 | max_d = (text1_length + text2_length + 1) // 2 | ||
978 | 275 | v_offset = max_d | ||
979 | 276 | v_length = 2 * max_d | ||
980 | 277 | v1 = [-1] * v_length | ||
981 | 278 | v1[v_offset + 1] = 0 | ||
982 | 279 | v2 = v1[:] | ||
983 | 280 | delta = text1_length - text2_length | ||
984 | 281 | # If the total number of characters is odd, then the front path will | ||
985 | 282 | # collide with the reverse path. | ||
986 | 283 | front = (delta % 2 != 0) | ||
987 | 284 | # Offsets for start and end of k loop. | ||
988 | 285 | # Prevents mapping of space beyond the grid. | ||
989 | 286 | k1start = 0 | ||
990 | 287 | k1end = 0 | ||
991 | 288 | k2start = 0 | ||
992 | 289 | k2end = 0 | ||
993 | 290 | for d in xrange(max_d): | ||
994 | 291 | # Bail out if deadline is reached. | ||
995 | 292 | if time.time() > deadline: | ||
996 | 293 | break | ||
997 | 294 | |||
998 | 295 | # Walk the front path one step. | ||
999 | 296 | for k1 in xrange(-d + k1start, d + 1 - k1end, 2): | ||
1000 | 297 | k1_offset = v_offset + k1 | ||
1001 | 298 | if k1 == -d or (k1 != d and | ||
1002 | 299 | v1[k1_offset - 1] < v1[k1_offset + 1]): | ||
1003 | 300 | x1 = v1[k1_offset + 1] | ||
1004 | 301 | else: | ||
1005 | 302 | x1 = v1[k1_offset - 1] + 1 | ||
1006 | 303 | y1 = x1 - k1 | ||
1007 | 304 | while (x1 < text1_length and y1 < text2_length and | ||
1008 | 305 | text1[x1] == text2[y1]): | ||
1009 | 306 | x1 += 1 | ||
1010 | 307 | y1 += 1 | ||
1011 | 308 | v1[k1_offset] = x1 | ||
1012 | 309 | if x1 > text1_length: | ||
1013 | 310 | # Ran off the right of the graph. | ||
1014 | 311 | k1end += 2 | ||
1015 | 312 | elif y1 > text2_length: | ||
1016 | 313 | # Ran off the bottom of the graph. | ||
1017 | 314 | k1start += 2 | ||
1018 | 315 | elif front: | ||
1019 | 316 | k2_offset = v_offset + delta - k1 | ||
1020 | 317 | if k2_offset >= 0 and k2_offset < v_length and v2[k2_offset] != -1: | ||
1021 | 318 | # Mirror x2 onto top-left coordinate system. | ||
1022 | 319 | x2 = text1_length - v2[k2_offset] | ||
1023 | 320 | if x1 >= x2: | ||
1024 | 321 | # Overlap detected. | ||
1025 | 322 | return self.diff_bisectSplit(text1, text2, x1, y1, deadline) | ||
1026 | 323 | |||
1027 | 324 | # Walk the reverse path one step. | ||
1028 | 325 | for k2 in xrange(-d + k2start, d + 1 - k2end, 2): | ||
1029 | 326 | k2_offset = v_offset + k2 | ||
1030 | 327 | if k2 == -d or (k2 != d and | ||
1031 | 328 | v2[k2_offset - 1] < v2[k2_offset + 1]): | ||
1032 | 329 | x2 = v2[k2_offset + 1] | ||
1033 | 330 | else: | ||
1034 | 331 | x2 = v2[k2_offset - 1] + 1 | ||
1035 | 332 | y2 = x2 - k2 | ||
1036 | 333 | while (x2 < text1_length and y2 < text2_length and | ||
1037 | 334 | text1[-x2 - 1] == text2[-y2 - 1]): | ||
1038 | 335 | x2 += 1 | ||
1039 | 336 | y2 += 1 | ||
1040 | 337 | v2[k2_offset] = x2 | ||
1041 | 338 | if x2 > text1_length: | ||
1042 | 339 | # Ran off the left of the graph. | ||
1043 | 340 | k2end += 2 | ||
1044 | 341 | elif y2 > text2_length: | ||
1045 | 342 | # Ran off the top of the graph. | ||
1046 | 343 | k2start += 2 | ||
1047 | 344 | elif not front: | ||
1048 | 345 | k1_offset = v_offset + delta - k2 | ||
1049 | 346 | if k1_offset >= 0 and k1_offset < v_length and v1[k1_offset] != -1: | ||
1050 | 347 | x1 = v1[k1_offset] | ||
1051 | 348 | y1 = v_offset + x1 - k1_offset | ||
1052 | 349 | # Mirror x2 onto top-left coordinate system. | ||
1053 | 350 | x2 = text1_length - x2 | ||
1054 | 351 | if x1 >= x2: | ||
1055 | 352 | # Overlap detected. | ||
1056 | 353 | return self.diff_bisectSplit(text1, text2, x1, y1, deadline) | ||
1057 | 354 | |||
1058 | 355 | # Diff took too long and hit the deadline or | ||
1059 | 356 | # number of diffs equals number of characters, no commonality at all. | ||
1060 | 357 | return [(self.DIFF_DELETE, text1), (self.DIFF_INSERT, text2)] | ||
1061 | 358 | |||
1062 | 359 | def diff_bisectSplit(self, text1, text2, x, y, deadline): | ||
1063 | 360 | """Given the location of the 'middle snake', split the diff in two parts | ||
1064 | 361 | and recurse. | ||
1065 | 362 | |||
1066 | 363 | Args: | ||
1067 | 364 | text1: Old string to be diffed. | ||
1068 | 365 | text2: New string to be diffed. | ||
1069 | 366 | x: Index of split point in text1. | ||
1070 | 367 | y: Index of split point in text2. | ||
1071 | 368 | deadline: Time at which to bail if not yet complete. | ||
1072 | 369 | |||
1073 | 370 | Returns: | ||
1074 | 371 | Array of diff tuples. | ||
1075 | 372 | """ | ||
1076 | 373 | text1a = text1[:x] | ||
1077 | 374 | text2a = text2[:y] | ||
1078 | 375 | text1b = text1[x:] | ||
1079 | 376 | text2b = text2[y:] | ||
1080 | 377 | |||
1081 | 378 | # Compute both diffs serially. | ||
1082 | 379 | diffs = self.diff_main(text1a, text2a, False, deadline) | ||
1083 | 380 | diffsb = self.diff_main(text1b, text2b, False, deadline) | ||
1084 | 381 | |||
1085 | 382 | return diffs + diffsb | ||
1086 | 383 | |||
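As an illustrative aside (not part of the diff), the bisect path can be exercised directly on a tiny input. This sketch assumes `dmp` is an instance of the class these methods belong to and that the DIFF_* constants are defined earlier in the module with their usual upstream values (-1, 1, 0); the expected result mirrors the upstream diff-match-patch test suite:

    import time
    deadline = time.time() + 1.0   # give the search a one-second budget
    diffs = dmp.diff_bisect(u"cat", u"map", deadline)
    # One common character 'a' survives; everything else is delete/insert pairs.
    assert diffs == [(dmp.DIFF_DELETE, "c"), (dmp.DIFF_INSERT, "m"),
                     (dmp.DIFF_EQUAL, "a"),
                     (dmp.DIFF_DELETE, "t"), (dmp.DIFF_INSERT, "p")]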
1087 | 384 | def diff_linesToChars(self, text1, text2): | ||
1088 | 385 | """Split two texts into an array of strings. Reduce the texts to a string | ||
1089 | 386 | of hashes where each Unicode character represents one line. | ||
1090 | 387 | |||
1091 | 388 | Args: | ||
1092 | 389 | text1: First string. | ||
1093 | 390 | text2: Second string. | ||
1094 | 391 | |||
1095 | 392 | Returns: | ||
1096 | 393 | Three element tuple, containing the encoded text1, the encoded text2 and | ||
1097 | 394 | the array of unique strings. The zeroth element of the array of unique | ||
1098 | 395 | strings is intentionally blank. | ||
1099 | 396 | """ | ||
1100 | 397 | lineArray = [] # e.g. lineArray[4] == "Hello\n" | ||
1101 | 398 | lineHash = {} # e.g. lineHash["Hello\n"] == 4 | ||
1102 | 399 | |||
1103 | 400 | # "\x00" is a valid character, but various debuggers don't like it. | ||
1104 | 401 | # So we'll insert a junk entry to avoid generating a null character. | ||
1105 | 402 | lineArray.append('') | ||
1106 | 403 | |||
1107 | 404 | def diff_linesToCharsMunge(text): | ||
1108 | 405 | """Split a text into an array of strings. Reduce the texts to a string | ||
1109 | 406 | of hashes where each Unicode character represents one line. | ||
1110 | 407 | Modifies linearray and linehash through being a closure. | ||
1111 | 408 | |||
1112 | 409 | Args: | ||
1113 | 410 | text: String to encode. | ||
1114 | 411 | |||
1115 | 412 | Returns: | ||
1116 | 413 | Encoded string. | ||
1117 | 414 | """ | ||
1118 | 415 | chars = [] | ||
1119 | 416 | # Walk the text, pulling out a substring for each line. | ||
1120 | 417 | # text.split('\n') would temporarily double our memory footprint. | ||
1121 | 418 | # Modifying text would create many large strings to garbage collect. | ||
1122 | 419 | lineStart = 0 | ||
1123 | 420 | lineEnd = -1 | ||
1124 | 421 | while lineEnd < len(text) - 1: | ||
1125 | 422 | lineEnd = text.find('\n', lineStart) | ||
1126 | 423 | if lineEnd == -1: | ||
1127 | 424 | lineEnd = len(text) - 1 | ||
1128 | 425 | line = text[lineStart:lineEnd + 1] | ||
1129 | 426 | lineStart = lineEnd + 1 | ||
1130 | 427 | |||
1131 | 428 | if line in lineHash: | ||
1132 | 429 | chars.append(unichr(lineHash[line])) | ||
1133 | 430 | else: | ||
1134 | 431 | lineArray.append(line) | ||
1135 | 432 | lineHash[line] = len(lineArray) - 1 | ||
1136 | 433 | chars.append(unichr(len(lineArray) - 1)) | ||
1137 | 434 | return "".join(chars) | ||
1138 | 435 | |||
1139 | 436 | chars1 = diff_linesToCharsMunge(text1) | ||
1140 | 437 | chars2 = diff_linesToCharsMunge(text2) | ||
1141 | 438 | return (chars1, chars2, lineArray) | ||
1142 | 439 | |||
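A small sketch of the line-to-character encoding, to make the docstring concrete (illustrative only; `dmp` assumed to be an instance of the surrounding class, expected values mirror the upstream test suite). `diff_charsToLines` below performs the reverse mapping:

    text1 = "alpha\nbeta\nalpha\n"
    text2 = "beta\nalpha\nbeta\n"
    chars1, chars2, line_array = dmp.diff_linesToChars(text1, text2)
    # Each distinct line becomes one Unicode character; index 0 is intentionally blank.
    assert line_array == ["", "alpha\n", "beta\n"]
    assert chars1 == u"\x01\x02\x01"
    assert chars2 == u"\x02\x01\x02"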
1143 | 440 | def diff_charsToLines(self, diffs, lineArray): | ||
1144 | 441 | """Rehydrate the text in a diff from a string of line hashes to real lines | ||
1145 | 442 | of text. | ||
1146 | 443 | |||
1147 | 444 | Args: | ||
1148 | 445 | diffs: Array of diff tuples. | ||
1149 | 446 | lineArray: Array of unique strings. | ||
1150 | 447 | """ | ||
1151 | 448 | for x in xrange(len(diffs)): | ||
1152 | 449 | text = [] | ||
1153 | 450 | for char in diffs[x][1]: | ||
1154 | 451 | text.append(lineArray[ord(char)]) | ||
1155 | 452 | diffs[x] = (diffs[x][0], "".join(text)) | ||
1156 | 453 | |||
1157 | 454 | def diff_commonPrefix(self, text1, text2): | ||
1158 | 455 | """Determine the common prefix of two strings. | ||
1159 | 456 | |||
1160 | 457 | Args: | ||
1161 | 458 | text1: First string. | ||
1162 | 459 | text2: Second string. | ||
1163 | 460 | |||
1164 | 461 | Returns: | ||
1165 | 462 | The number of characters common to the start of each string. | ||
1166 | 463 | """ | ||
1167 | 464 | # Quick check for common null cases. | ||
1168 | 465 | if not text1 or not text2 or text1[0] != text2[0]: | ||
1169 | 466 | return 0 | ||
1170 | 467 | # Binary search. | ||
1171 | 468 | # Performance analysis: http://neil.fraser.name/news/2007/10/09/ | ||
1172 | 469 | pointermin = 0 | ||
1173 | 470 | pointermax = min(len(text1), len(text2)) | ||
1174 | 471 | pointermid = pointermax | ||
1175 | 472 | pointerstart = 0 | ||
1176 | 473 | while pointermin < pointermid: | ||
1177 | 474 | if text1[pointerstart:pointermid] == text2[pointerstart:pointermid]: | ||
1178 | 475 | pointermin = pointermid | ||
1179 | 476 | pointerstart = pointermin | ||
1180 | 477 | else: | ||
1181 | 478 | pointermax = pointermid | ||
1182 | 479 | pointermid = (pointermax - pointermin) // 2 + pointermin | ||
1183 | 480 | return pointermid | ||
1184 | 481 | |||
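For reference, the binary-search prefix scan behaves as follows (illustrative only, `dmp` assumed to be an instance of the surrounding class):

    assert dmp.diff_commonPrefix("abc", "xyz") == 0           # no shared start
    assert dmp.diff_commonPrefix("1234abcdef", "1234xyz") == 4
    assert dmp.diff_commonPrefix("1234", "1234xyz") == 4       # whole shorter string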
1185 | 482 | def diff_commonSuffix(self, text1, text2): | ||
1186 | 483 | """Determine the common suffix of two strings. | ||
1187 | 484 | |||
1188 | 485 | Args: | ||
1189 | 486 | text1: First string. | ||
1190 | 487 | text2: Second string. | ||
1191 | 488 | |||
1192 | 489 | Returns: | ||
1193 | 490 | The number of characters common to the end of each string. | ||
1194 | 491 | """ | ||
1195 | 492 | # Quick check for common null cases. | ||
1196 | 493 | if not text1 or not text2 or text1[-1] != text2[-1]: | ||
1197 | 494 | return 0 | ||
1198 | 495 | # Binary search. | ||
1199 | 496 | # Performance analysis: http://neil.fraser.name/news/2007/10/09/ | ||
1200 | 497 | pointermin = 0 | ||
1201 | 498 | pointermax = min(len(text1), len(text2)) | ||
1202 | 499 | pointermid = pointermax | ||
1203 | 500 | pointerend = 0 | ||
1204 | 501 | while pointermin < pointermid: | ||
1205 | 502 | if (text1[-pointermid:len(text1) - pointerend] == | ||
1206 | 503 | text2[-pointermid:len(text2) - pointerend]): | ||
1207 | 504 | pointermin = pointermid | ||
1208 | 505 | pointerend = pointermin | ||
1209 | 506 | else: | ||
1210 | 507 | pointermax = pointermid | ||
1211 | 508 | pointermid = (pointermax - pointermin) // 2 + pointermin | ||
1212 | 509 | return pointermid | ||
1213 | 510 | |||
1214 | 511 | def diff_commonOverlap(self, text1, text2): | ||
1215 | 512 | """Determine if the suffix of one string is the prefix of another. | ||
1216 | 513 | |||
1217 | 514 | Args: | ||
1218 | 515 | text1: First string. | ||
1219 | 516 | text2: Second string. | ||
1220 | 517 | |||
1221 | 518 | Returns: | ||
1222 | 519 | The number of characters common to the end of the first | ||
1223 | 520 | string and the start of the second string. | ||
1224 | 521 | """ | ||
1225 | 522 | # Cache the text lengths to prevent multiple calls. | ||
1226 | 523 | text1_length = len(text1) | ||
1227 | 524 | text2_length = len(text2) | ||
1228 | 525 | # Eliminate the null case. | ||
1229 | 526 | if text1_length == 0 or text2_length == 0: | ||
1230 | 527 | return 0 | ||
1231 | 528 | # Truncate the longer string. | ||
1232 | 529 | if text1_length > text2_length: | ||
1233 | 530 | text1 = text1[-text2_length:] | ||
1234 | 531 | elif text1_length < text2_length: | ||
1235 | 532 | text2 = text2[:text1_length] | ||
1236 | 533 | text_length = min(text1_length, text2_length) | ||
1237 | 534 | # Quick check for the worst case. | ||
1238 | 535 | if text1 == text2: | ||
1239 | 536 | return text_length | ||
1240 | 537 | |||
1241 | 538 | # Start by looking for a single character match | ||
1242 | 539 | # and increase length until no match is found. | ||
1243 | 540 | # Performance analysis: http://neil.fraser.name/news/2010/11/04/ | ||
1244 | 541 | best = 0 | ||
1245 | 542 | length = 1 | ||
1246 | 543 | while True: | ||
1247 | 544 | pattern = text1[-length:] | ||
1248 | 545 | found = text2.find(pattern) | ||
1249 | 546 | if found == -1: | ||
1250 | 547 | return best | ||
1251 | 548 | length += found | ||
1252 | 549 | if found == 0 or text1[-length:] == text2[:length]: | ||
1253 | 550 | best = length | ||
1254 | 551 | length += 1 | ||
1255 | 552 | |||
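A few illustrative cases for the suffix/prefix overlap scan (`dmp` assumed as above; expected values mirror the upstream tests):

    assert dmp.diff_commonOverlap("", "abcd") == 0             # null case
    assert dmp.diff_commonOverlap("abc", "abcd") == 3          # whole suffix matches the prefix
    assert dmp.diff_commonOverlap("123456", "abcd") == 0       # no overlap
    assert dmp.diff_commonOverlap("123456xxx", "xxxabcd") == 3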
1256 | 553 | def diff_halfMatch(self, text1, text2): | ||
1257 | 554 | """Do the two texts share a substring which is at least half the length of | ||
1258 | 555 | the longer text? | ||
1259 | 556 | This speedup can produce non-minimal diffs. | ||
1260 | 557 | |||
1261 | 558 | Args: | ||
1262 | 559 | text1: First string. | ||
1263 | 560 | text2: Second string. | ||
1264 | 561 | |||
1265 | 562 | Returns: | ||
1266 | 563 | Five element Array, containing the prefix of text1, the suffix of text1, | ||
1267 | 564 | the prefix of text2, the suffix of text2 and the common middle. Or None | ||
1268 | 565 | if there was no match. | ||
1269 | 566 | """ | ||
1270 | 567 | if self.Diff_Timeout <= 0: | ||
1271 | 568 | # Don't risk returning a non-optimal diff if we have unlimited time. | ||
1272 | 569 | return None | ||
1273 | 570 | if len(text1) > len(text2): | ||
1274 | 571 | (longtext, shorttext) = (text1, text2) | ||
1275 | 572 | else: | ||
1276 | 573 | (shorttext, longtext) = (text1, text2) | ||
1277 | 574 | if len(longtext) < 4 or len(shorttext) * 2 < len(longtext): | ||
1278 | 575 | return None # Pointless. | ||
1279 | 576 | |||
1280 | 577 | def diff_halfMatchI(longtext, shorttext, i): | ||
1281 | 578 | """Does a substring of shorttext exist within longtext such that the | ||
1282 | 579 | substring is at least half the length of longtext? | ||
1283 | 580 | Closure, but does not reference any external variables. | ||
1284 | 581 | |||
1285 | 582 | Args: | ||
1286 | 583 | longtext: Longer string. | ||
1287 | 584 | shorttext: Shorter string. | ||
1288 | 585 | i: Start index of quarter length substring within longtext. | ||
1289 | 586 | |||
1290 | 587 | Returns: | ||
1291 | 588 | Five element Array, containing the prefix of longtext, the suffix of | ||
1292 | 589 | longtext, the prefix of shorttext, the suffix of shorttext and the | ||
1293 | 590 | common middle. Or None if there was no match. | ||
1294 | 591 | """ | ||
1295 | 592 | seed = longtext[i:i + len(longtext) // 4] | ||
1296 | 593 | best_common = '' | ||
1297 | 594 | j = shorttext.find(seed) | ||
1298 | 595 | while j != -1: | ||
1299 | 596 | prefixLength = self.diff_commonPrefix(longtext[i:], shorttext[j:]) | ||
1300 | 597 | suffixLength = self.diff_commonSuffix(longtext[:i], shorttext[:j]) | ||
1301 | 598 | if len(best_common) < suffixLength + prefixLength: | ||
1302 | 599 | best_common = (shorttext[j - suffixLength:j] + | ||
1303 | 600 | shorttext[j:j + prefixLength]) | ||
1304 | 601 | best_longtext_a = longtext[:i - suffixLength] | ||
1305 | 602 | best_longtext_b = longtext[i + prefixLength:] | ||
1306 | 603 | best_shorttext_a = shorttext[:j - suffixLength] | ||
1307 | 604 | best_shorttext_b = shorttext[j + prefixLength:] | ||
1308 | 605 | j = shorttext.find(seed, j + 1) | ||
1309 | 606 | |||
1310 | 607 | if len(best_common) * 2 >= len(longtext): | ||
1311 | 608 | return (best_longtext_a, best_longtext_b, | ||
1312 | 609 | best_shorttext_a, best_shorttext_b, best_common) | ||
1313 | 610 | else: | ||
1314 | 611 | return None | ||
1315 | 612 | |||
1316 | 613 | # First check if the second quarter is the seed for a half-match. | ||
1317 | 614 | hm1 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 3) // 4) | ||
1318 | 615 | # Check again based on the third quarter. | ||
1319 | 616 | hm2 = diff_halfMatchI(longtext, shorttext, (len(longtext) + 1) // 2) | ||
1320 | 617 | if not hm1 and not hm2: | ||
1321 | 618 | return None | ||
1322 | 619 | elif not hm2: | ||
1323 | 620 | hm = hm1 | ||
1324 | 621 | elif not hm1: | ||
1325 | 622 | hm = hm2 | ||
1326 | 623 | else: | ||
1327 | 624 | # Both matched. Select the longest. | ||
1328 | 625 | if len(hm1[4]) > len(hm2[4]): | ||
1329 | 626 | hm = hm1 | ||
1330 | 627 | else: | ||
1331 | 628 | hm = hm2 | ||
1332 | 629 | |||
1333 | 630 | # A half-match was found, sort out the return data. | ||
1334 | 631 | if len(text1) > len(text2): | ||
1335 | 632 | (text1_a, text1_b, text2_a, text2_b, mid_common) = hm | ||
1336 | 633 | else: | ||
1337 | 634 | (text2_a, text2_b, text1_a, text1_b, mid_common) = hm | ||
1338 | 635 | return (text1_a, text1_b, text2_a, text2_b, mid_common) | ||
1339 | 636 | |||
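An illustrative sketch of the half-match speed-up, assuming the default Diff_Timeout of 1.0 set earlier in the module and `dmp` as above:

    assert dmp.diff_halfMatch("1234567890", "abcdef") is None   # no half-length overlap
    assert dmp.diff_halfMatch("1234567890", "a345678z") == (
        "12", "90", "a", "z", "345678")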
1340 | 637 | def diff_cleanupSemantic(self, diffs): | ||
1341 | 638 | """Reduce the number of edits by eliminating semantically trivial | ||
1342 | 639 | equalities. | ||
1343 | 640 | |||
1344 | 641 | Args: | ||
1345 | 642 | diffs: Array of diff tuples. | ||
1346 | 643 | """ | ||
1347 | 644 | changes = False | ||
1348 | 645 | equalities = [] # Stack of indices where equalities are found. | ||
1349 | 646 | lastequality = None # Always equal to diffs[equalities[-1]][1] | ||
1350 | 647 | pointer = 0 # Index of current position. | ||
1351 | 648 | # Number of chars that changed prior to the equality. | ||
1352 | 649 | length_insertions1, length_deletions1 = 0, 0 | ||
1353 | 650 | # Number of chars that changed after the equality. | ||
1354 | 651 | length_insertions2, length_deletions2 = 0, 0 | ||
1355 | 652 | while pointer < len(diffs): | ||
1356 | 653 | if diffs[pointer][0] == self.DIFF_EQUAL: # Equality found. | ||
1357 | 654 | equalities.append(pointer) | ||
1358 | 655 | length_insertions1, length_insertions2 = length_insertions2, 0 | ||
1359 | 656 | length_deletions1, length_deletions2 = length_deletions2, 0 | ||
1360 | 657 | lastequality = diffs[pointer][1] | ||
1361 | 658 | else: # An insertion or deletion. | ||
1362 | 659 | if diffs[pointer][0] == self.DIFF_INSERT: | ||
1363 | 660 | length_insertions2 += len(diffs[pointer][1]) | ||
1364 | 661 | else: | ||
1365 | 662 | length_deletions2 += len(diffs[pointer][1]) | ||
1366 | 663 | # Eliminate an equality that is smaller or equal to the edits on both | ||
1367 | 664 | # sides of it. | ||
1368 | 665 | if (lastequality and (len(lastequality) <= | ||
1369 | 666 | max(length_insertions1, length_deletions1)) and | ||
1370 | 667 | (len(lastequality) <= max(length_insertions2, length_deletions2))): | ||
1371 | 668 | # Duplicate record. | ||
1372 | 669 | diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality)) | ||
1373 | 670 | # Change second copy to insert. | ||
1374 | 671 | diffs[equalities[-1] + 1] = (self.DIFF_INSERT, | ||
1375 | 672 | diffs[equalities[-1] + 1][1]) | ||
1376 | 673 | # Throw away the equality we just deleted. | ||
1377 | 674 | equalities.pop() | ||
1378 | 675 | # Throw away the previous equality (it needs to be reevaluated). | ||
1379 | 676 | if len(equalities): | ||
1380 | 677 | equalities.pop() | ||
1381 | 678 | if len(equalities): | ||
1382 | 679 | pointer = equalities[-1] | ||
1383 | 680 | else: | ||
1384 | 681 | pointer = -1 | ||
1385 | 682 | # Reset the counters. | ||
1386 | 683 | length_insertions1, length_deletions1 = 0, 0 | ||
1387 | 684 | length_insertions2, length_deletions2 = 0, 0 | ||
1388 | 685 | lastequality = None | ||
1389 | 686 | changes = True | ||
1390 | 687 | pointer += 1 | ||
1391 | 688 | |||
1392 | 689 | # Normalize the diff. | ||
1393 | 690 | if changes: | ||
1394 | 691 | self.diff_cleanupMerge(diffs) | ||
1395 | 692 | self.diff_cleanupSemanticLossless(diffs) | ||
1396 | 693 | |||
1397 | 694 | # Find any overlaps between deletions and insertions. | ||
1398 | 695 | # e.g: <del>abcxxx</del><ins>xxxdef</ins> | ||
1399 | 696 | # -> <del>abc</del>xxx<ins>def</ins> | ||
1400 | 697 | # e.g: <del>xxxabc</del><ins>defxxx</ins> | ||
1401 | 698 | # -> <ins>def</ins>xxx<del>abc</del> | ||
1402 | 699 | # Only extract an overlap if it is as big as the edit ahead or behind it. | ||
1403 | 700 | pointer = 1 | ||
1404 | 701 | while pointer < len(diffs): | ||
1405 | 702 | if (diffs[pointer - 1][0] == self.DIFF_DELETE and | ||
1406 | 703 | diffs[pointer][0] == self.DIFF_INSERT): | ||
1407 | 704 | deletion = diffs[pointer - 1][1] | ||
1408 | 705 | insertion = diffs[pointer][1] | ||
1409 | 706 | overlap_length1 = self.diff_commonOverlap(deletion, insertion) | ||
1410 | 707 | overlap_length2 = self.diff_commonOverlap(insertion, deletion) | ||
1411 | 708 | if overlap_length1 >= overlap_length2: | ||
1412 | 709 | if (overlap_length1 >= len(deletion) / 2.0 or | ||
1413 | 710 | overlap_length1 >= len(insertion) / 2.0): | ||
1414 | 711 | # Overlap found. Insert an equality and trim the surrounding edits. | ||
1415 | 712 | diffs.insert(pointer, (self.DIFF_EQUAL, | ||
1416 | 713 | insertion[:overlap_length1])) | ||
1417 | 714 | diffs[pointer - 1] = (self.DIFF_DELETE, | ||
1418 | 715 | deletion[:len(deletion) - overlap_length1]) | ||
1419 | 716 | diffs[pointer + 1] = (self.DIFF_INSERT, | ||
1420 | 717 | insertion[overlap_length1:]) | ||
1421 | 718 | pointer += 1 | ||
1422 | 719 | else: | ||
1423 | 720 | if (overlap_length2 >= len(deletion) / 2.0 or | ||
1424 | 721 | overlap_length2 >= len(insertion) / 2.0): | ||
1425 | 722 | # Reverse overlap found. | ||
1426 | 723 | # Insert an equality and swap and trim the surrounding edits. | ||
1427 | 724 | diffs.insert(pointer, (self.DIFF_EQUAL, deletion[:overlap_length2])) | ||
1428 | 725 | diffs[pointer - 1] = (self.DIFF_INSERT, | ||
1429 | 726 | insertion[:len(insertion) - overlap_length2]) | ||
1430 | 727 | diffs[pointer + 1] = (self.DIFF_DELETE, deletion[overlap_length2:]) | ||
1431 | 728 | pointer += 1 | ||
1432 | 729 | pointer += 1 | ||
1433 | 730 | pointer += 1 | ||
1434 | 731 | |||
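The overlap extraction described in the comments above can be seen on a minimal input (illustrative only; `dmp` and the DIFF_* constants assumed as defined earlier in the module):

    diffs = [(dmp.DIFF_DELETE, "abcxxx"), (dmp.DIFF_INSERT, "xxxdef")]
    dmp.diff_cleanupSemantic(diffs)
    # <del>abcxxx</del><ins>xxxdef</ins>  ->  <del>abc</del>xxx<ins>def</ins>
    assert diffs == [(dmp.DIFF_DELETE, "abc"),
                     (dmp.DIFF_EQUAL, "xxx"),
                     (dmp.DIFF_INSERT, "def")]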
1435 | 732 | def diff_cleanupSemanticLossless(self, diffs): | ||
1436 | 733 | """Look for single edits surrounded on both sides by equalities | ||
1437 | 734 | which can be shifted sideways to align the edit to a word boundary. | ||
1438 | 735 | e.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came. | ||
1439 | 736 | |||
1440 | 737 | Args: | ||
1441 | 738 | diffs: Array of diff tuples. | ||
1442 | 739 | """ | ||
1443 | 740 | |||
1444 | 741 | def diff_cleanupSemanticScore(one, two): | ||
1445 | 742 | """Given two strings, compute a score representing whether the | ||
1446 | 743 | internal boundary falls on logical boundaries. | ||
1447 | 744 | Scores range from 6 (best) to 0 (worst). | ||
1448 | 745 | Closure, but does not reference any external variables. | ||
1449 | 746 | |||
1450 | 747 | Args: | ||
1451 | 748 | one: First string. | ||
1452 | 749 | two: Second string. | ||
1453 | 750 | |||
1454 | 751 | Returns: | ||
1455 | 752 | The score. | ||
1456 | 753 | """ | ||
1457 | 754 | if not one or not two: | ||
1458 | 755 | # Edges are the best. | ||
1459 | 756 | return 6 | ||
1460 | 757 | |||
1461 | 758 | # Each port of this function behaves slightly differently due to | ||
1462 | 759 | # subtle differences in each language's definition of things like | ||
1463 | 760 | # 'whitespace'. Since this function's purpose is largely cosmetic, | ||
1464 | 761 | # the choice has been made to use each language's native features | ||
1465 | 762 | # rather than force total conformity. | ||
1466 | 763 | char1 = one[-1] | ||
1467 | 764 | char2 = two[0] | ||
1468 | 765 | nonAlphaNumeric1 = not char1.isalnum() | ||
1469 | 766 | nonAlphaNumeric2 = not char2.isalnum() | ||
1470 | 767 | whitespace1 = nonAlphaNumeric1 and char1.isspace() | ||
1471 | 768 | whitespace2 = nonAlphaNumeric2 and char2.isspace() | ||
1472 | 769 | lineBreak1 = whitespace1 and (char1 == "\r" or char1 == "\n") | ||
1473 | 770 | lineBreak2 = whitespace2 and (char2 == "\r" or char2 == "\n") | ||
1474 | 771 | blankLine1 = lineBreak1 and self.BLANKLINEEND.search(one) | ||
1475 | 772 | blankLine2 = lineBreak2 and self.BLANKLINESTART.match(two) | ||
1476 | 773 | |||
1477 | 774 | if blankLine1 or blankLine2: | ||
1478 | 775 | # Five points for blank lines. | ||
1479 | 776 | return 5 | ||
1480 | 777 | elif lineBreak1 or lineBreak2: | ||
1481 | 778 | # Four points for line breaks. | ||
1482 | 779 | return 4 | ||
1483 | 780 | elif nonAlphaNumeric1 and not whitespace1 and whitespace2: | ||
1484 | 781 | # Three points for end of sentences. | ||
1485 | 782 | return 3 | ||
1486 | 783 | elif whitespace1 or whitespace2: | ||
1487 | 784 | # Two points for whitespace. | ||
1488 | 785 | return 2 | ||
1489 | 786 | elif nonAlphaNumeric1 or nonAlphaNumeric2: | ||
1490 | 787 | # One point for non-alphanumeric. | ||
1491 | 788 | return 1 | ||
1492 | 789 | return 0 | ||
1493 | 790 | |||
1494 | 791 | pointer = 1 | ||
1495 | 792 | # Intentionally ignore the first and last element (don't need checking). | ||
1496 | 793 | while pointer < len(diffs) - 1: | ||
1497 | 794 | if (diffs[pointer - 1][0] == self.DIFF_EQUAL and | ||
1498 | 795 | diffs[pointer + 1][0] == self.DIFF_EQUAL): | ||
1499 | 796 | # This is a single edit surrounded by equalities. | ||
1500 | 797 | equality1 = diffs[pointer - 1][1] | ||
1501 | 798 | edit = diffs[pointer][1] | ||
1502 | 799 | equality2 = diffs[pointer + 1][1] | ||
1503 | 800 | |||
1504 | 801 | # First, shift the edit as far left as possible. | ||
1505 | 802 | commonOffset = self.diff_commonSuffix(equality1, edit) | ||
1506 | 803 | if commonOffset: | ||
1507 | 804 | commonString = edit[-commonOffset:] | ||
1508 | 805 | equality1 = equality1[:-commonOffset] | ||
1509 | 806 | edit = commonString + edit[:-commonOffset] | ||
1510 | 807 | equality2 = commonString + equality2 | ||
1511 | 808 | |||
1512 | 809 | # Second, step character by character right, looking for the best fit. | ||
1513 | 810 | bestEquality1 = equality1 | ||
1514 | 811 | bestEdit = edit | ||
1515 | 812 | bestEquality2 = equality2 | ||
1516 | 813 | bestScore = (diff_cleanupSemanticScore(equality1, edit) + | ||
1517 | 814 | diff_cleanupSemanticScore(edit, equality2)) | ||
1518 | 815 | while edit and equality2 and edit[0] == equality2[0]: | ||
1519 | 816 | equality1 += edit[0] | ||
1520 | 817 | edit = edit[1:] + equality2[0] | ||
1521 | 818 | equality2 = equality2[1:] | ||
1522 | 819 | score = (diff_cleanupSemanticScore(equality1, edit) + | ||
1523 | 820 | diff_cleanupSemanticScore(edit, equality2)) | ||
1524 | 821 | # The >= encourages trailing rather than leading whitespace on edits. | ||
1525 | 822 | if score >= bestScore: | ||
1526 | 823 | bestScore = score | ||
1527 | 824 | bestEquality1 = equality1 | ||
1528 | 825 | bestEdit = edit | ||
1529 | 826 | bestEquality2 = equality2 | ||
1530 | 827 | |||
1531 | 828 | if diffs[pointer - 1][1] != bestEquality1: | ||
1532 | 829 | # We have an improvement, save it back to the diff. | ||
1533 | 830 | if bestEquality1: | ||
1534 | 831 | diffs[pointer - 1] = (diffs[pointer - 1][0], bestEquality1) | ||
1535 | 832 | else: | ||
1536 | 833 | del diffs[pointer - 1] | ||
1537 | 834 | pointer -= 1 | ||
1538 | 835 | diffs[pointer] = (diffs[pointer][0], bestEdit) | ||
1539 | 836 | if bestEquality2: | ||
1540 | 837 | diffs[pointer + 1] = (diffs[pointer + 1][0], bestEquality2) | ||
1541 | 838 | else: | ||
1542 | 839 | del diffs[pointer + 1] | ||
1543 | 840 | pointer -= 1 | ||
1544 | 841 | pointer += 1 | ||
1545 | 842 | |||
1546 | 843 | # Define some regex patterns for matching boundaries. | ||
1547 | 844 | BLANKLINEEND = re.compile(r"\n\r?\n$") | ||
1548 | 845 | BLANKLINESTART = re.compile(r"^\r?\n\r?\n") | ||
1549 | 846 | |||
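The docstring example above, written out as code (illustrative only; same assumptions about `dmp` and the DIFF_* constants):

    diffs = [(dmp.DIFF_EQUAL, "The c"), (dmp.DIFF_INSERT, "at c"), (dmp.DIFF_EQUAL, "ame.")]
    dmp.diff_cleanupSemanticLossless(diffs)
    # The edit is shifted onto the word boundary.
    assert diffs == [(dmp.DIFF_EQUAL, "The "),
                     (dmp.DIFF_INSERT, "cat "),
                     (dmp.DIFF_EQUAL, "came.")]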
1550 | 847 | def diff_cleanupEfficiency(self, diffs): | ||
1551 | 848 | """Reduce the number of edits by eliminating operationally trivial | ||
1552 | 849 | equalities. | ||
1553 | 850 | |||
1554 | 851 | Args: | ||
1555 | 852 | diffs: Array of diff tuples. | ||
1556 | 853 | """ | ||
1557 | 854 | changes = False | ||
1558 | 855 | equalities = [] # Stack of indices where equalities are found. | ||
1559 | 856 | lastequality = None # Always equal to diffs[equalities[-1]][1] | ||
1560 | 857 | pointer = 0 # Index of current position. | ||
1561 | 858 | pre_ins = False # Is there an insertion operation before the last equality. | ||
1562 | 859 | pre_del = False # Is there a deletion operation before the last equality. | ||
1563 | 860 | post_ins = False # Is there an insertion operation after the last equality. | ||
1564 | 861 | post_del = False # Is there a deletion operation after the last equality. | ||
1565 | 862 | while pointer < len(diffs): | ||
1566 | 863 | if diffs[pointer][0] == self.DIFF_EQUAL: # Equality found. | ||
1567 | 864 | if (len(diffs[pointer][1]) < self.Diff_EditCost and | ||
1568 | 865 | (post_ins or post_del)): | ||
1569 | 866 | # Candidate found. | ||
1570 | 867 | equalities.append(pointer) | ||
1571 | 868 | pre_ins = post_ins | ||
1572 | 869 | pre_del = post_del | ||
1573 | 870 | lastequality = diffs[pointer][1] | ||
1574 | 871 | else: | ||
1575 | 872 | # Not a candidate, and can never become one. | ||
1576 | 873 | equalities = [] | ||
1577 | 874 | lastequality = None | ||
1578 | 875 | |||
1579 | 876 | post_ins = post_del = False | ||
1580 | 877 | else: # An insertion or deletion. | ||
1581 | 878 | if diffs[pointer][0] == self.DIFF_DELETE: | ||
1582 | 879 | post_del = True | ||
1583 | 880 | else: | ||
1584 | 881 | post_ins = True | ||
1585 | 882 | |||
1586 | 883 | # Five types to be split: | ||
1587 | 884 | # <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del> | ||
1588 | 885 | # <ins>A</ins>X<ins>C</ins><del>D</del> | ||
1589 | 886 | # <ins>A</ins><del>B</del>X<ins>C</ins> | ||
1590 | 887 | # <ins>A</ins>X<ins>C</ins><del>D</del> | ||
1591 | 888 | # <ins>A</ins><del>B</del>X<del>C</del> | ||
1592 | 889 | |||
1593 | 890 | if lastequality and ((pre_ins and pre_del and post_ins and post_del) or | ||
1594 | 891 | ((len(lastequality) < self.Diff_EditCost / 2) and | ||
1595 | 892 | (pre_ins + pre_del + post_ins + post_del) == 3)): | ||
1596 | 893 | # Duplicate record. | ||
1597 | 894 | diffs.insert(equalities[-1], (self.DIFF_DELETE, lastequality)) | ||
1598 | 895 | # Change second copy to insert. | ||
1599 | 896 | diffs[equalities[-1] + 1] = (self.DIFF_INSERT, | ||
1600 | 897 | diffs[equalities[-1] + 1][1]) | ||
1601 | 898 | equalities.pop() # Throw away the equality we just deleted. | ||
1602 | 899 | lastequality = None | ||
1603 | 900 | if pre_ins and pre_del: | ||
1604 | 901 | # No changes made which could affect previous entry, keep going. | ||
1605 | 902 | post_ins = post_del = True | ||
1606 | 903 | equalities = [] | ||
1607 | 904 | else: | ||
1608 | 905 | if len(equalities): | ||
1609 | 906 | equalities.pop() # Throw away the previous equality. | ||
1610 | 907 | if len(equalities): | ||
1611 | 908 | pointer = equalities[-1] | ||
1612 | 909 | else: | ||
1613 | 910 | pointer = -1 | ||
1614 | 911 | post_ins = post_del = False | ||
1615 | 912 | changes = True | ||
1616 | 913 | pointer += 1 | ||
1617 | 914 | |||
1618 | 915 | if changes: | ||
1619 | 916 | self.diff_cleanupMerge(diffs) | ||
1620 | 917 | |||
1621 | 918 | def diff_cleanupMerge(self, diffs): | ||
1622 | 919 | """Reorder and merge like edit sections. Merge equalities. | ||
1623 | 920 | Any edit section can move as long as it doesn't cross an equality. | ||
1624 | 921 | |||
1625 | 922 | Args: | ||
1626 | 923 | diffs: Array of diff tuples. | ||
1627 | 924 | """ | ||
1628 | 925 | diffs.append((self.DIFF_EQUAL, '')) # Add a dummy entry at the end. | ||
1629 | 926 | pointer = 0 | ||
1630 | 927 | count_delete = 0 | ||
1631 | 928 | count_insert = 0 | ||
1632 | 929 | text_delete = '' | ||
1633 | 930 | text_insert = '' | ||
1634 | 931 | while pointer < len(diffs): | ||
1635 | 932 | if diffs[pointer][0] == self.DIFF_INSERT: | ||
1636 | 933 | count_insert += 1 | ||
1637 | 934 | text_insert += diffs[pointer][1] | ||
1638 | 935 | pointer += 1 | ||
1639 | 936 | elif diffs[pointer][0] == self.DIFF_DELETE: | ||
1640 | 937 | count_delete += 1 | ||
1641 | 938 | text_delete += diffs[pointer][1] | ||
1642 | 939 | pointer += 1 | ||
1643 | 940 | elif diffs[pointer][0] == self.DIFF_EQUAL: | ||
1644 | 941 | # Upon reaching an equality, check for prior redundancies. | ||
1645 | 942 | if count_delete + count_insert > 1: | ||
1646 | 943 | if count_delete != 0 and count_insert != 0: | ||
1647 | 944 | # Factor out any common prefixes. | ||
1648 | 945 | commonlength = self.diff_commonPrefix(text_insert, text_delete) | ||
1649 | 946 | if commonlength != 0: | ||
1650 | 947 | x = pointer - count_delete - count_insert - 1 | ||
1651 | 948 | if x >= 0 and diffs[x][0] == self.DIFF_EQUAL: | ||
1652 | 949 | diffs[x] = (diffs[x][0], diffs[x][1] + | ||
1653 | 950 | text_insert[:commonlength]) | ||
1654 | 951 | else: | ||
1655 | 952 | diffs.insert(0, (self.DIFF_EQUAL, text_insert[:commonlength])) | ||
1656 | 953 | pointer += 1 | ||
1657 | 954 | text_insert = text_insert[commonlength:] | ||
1658 | 955 | text_delete = text_delete[commonlength:] | ||
1659 | 956 | # Factor out any common suffixes. | ||
1660 | 957 | commonlength = self.diff_commonSuffix(text_insert, text_delete) | ||
1661 | 958 | if commonlength != 0: | ||
1662 | 959 | diffs[pointer] = (diffs[pointer][0], text_insert[-commonlength:] + | ||
1663 | 960 | diffs[pointer][1]) | ||
1664 | 961 | text_insert = text_insert[:-commonlength] | ||
1665 | 962 | text_delete = text_delete[:-commonlength] | ||
1666 | 963 | # Delete the offending records and add the merged ones. | ||
1667 | 964 | if count_delete == 0: | ||
1668 | 965 | diffs[pointer - count_insert : pointer] = [ | ||
1669 | 966 | (self.DIFF_INSERT, text_insert)] | ||
1670 | 967 | elif count_insert == 0: | ||
1671 | 968 | diffs[pointer - count_delete : pointer] = [ | ||
1672 | 969 | (self.DIFF_DELETE, text_delete)] | ||
1673 | 970 | else: | ||
1674 | 971 | diffs[pointer - count_delete - count_insert : pointer] = [ | ||
1675 | 972 | (self.DIFF_DELETE, text_delete), | ||
1676 | 973 | (self.DIFF_INSERT, text_insert)] | ||
1677 | 974 | pointer = pointer - count_delete - count_insert + 1 | ||
1678 | 975 | if count_delete != 0: | ||
1679 | 976 | pointer += 1 | ||
1680 | 977 | if count_insert != 0: | ||
1681 | 978 | pointer += 1 | ||
1682 | 979 | elif pointer != 0 and diffs[pointer - 1][0] == self.DIFF_EQUAL: | ||
1683 | 980 | # Merge this equality with the previous one. | ||
1684 | 981 | diffs[pointer - 1] = (diffs[pointer - 1][0], | ||
1685 | 982 | diffs[pointer - 1][1] + diffs[pointer][1]) | ||
1686 | 983 | del diffs[pointer] | ||
1687 | 984 | else: | ||
1688 | 985 | pointer += 1 | ||
1689 | 986 | |||
1690 | 987 | count_insert = 0 | ||
1691 | 988 | count_delete = 0 | ||
1692 | 989 | text_delete = '' | ||
1693 | 990 | text_insert = '' | ||
1694 | 991 | |||
1695 | 992 | if diffs[-1][1] == '': | ||
1696 | 993 | diffs.pop() # Remove the dummy entry at the end. | ||
1697 | 994 | |||
1698 | 995 | # Second pass: look for single edits surrounded on both sides by equalities | ||
1699 | 996 | # which can be shifted sideways to eliminate an equality. | ||
1700 | 997 | # e.g: A<ins>BA</ins>C -> <ins>AB</ins>AC | ||
1701 | 998 | changes = False | ||
1702 | 999 | pointer = 1 | ||
1703 | 1000 | # Intentionally ignore the first and last element (don't need checking). | ||
1704 | 1001 | while pointer < len(diffs) - 1: | ||
1705 | 1002 | if (diffs[pointer - 1][0] == self.DIFF_EQUAL and | ||
1706 | 1003 | diffs[pointer + 1][0] == self.DIFF_EQUAL): | ||
1707 | 1004 | # This is a single edit surrounded by equalities. | ||
1708 | 1005 | if diffs[pointer][1].endswith(diffs[pointer - 1][1]): | ||
1709 | 1006 | # Shift the edit over the previous equality. | ||
1710 | 1007 | diffs[pointer] = (diffs[pointer][0], | ||
1711 | 1008 | diffs[pointer - 1][1] + | ||
1712 | 1009 | diffs[pointer][1][:-len(diffs[pointer - 1][1])]) | ||
1713 | 1010 | diffs[pointer + 1] = (diffs[pointer + 1][0], | ||
1714 | 1011 | diffs[pointer - 1][1] + diffs[pointer + 1][1]) | ||
1715 | 1012 | del diffs[pointer - 1] | ||
1716 | 1013 | changes = True | ||
1717 | 1014 | elif diffs[pointer][1].startswith(diffs[pointer + 1][1]): | ||
1718 | 1015 | # Shift the edit over the next equality. | ||
1719 | 1016 | diffs[pointer - 1] = (diffs[pointer - 1][0], | ||
1720 | 1017 | diffs[pointer - 1][1] + diffs[pointer + 1][1]) | ||
1721 | 1018 | diffs[pointer] = (diffs[pointer][0], | ||
1722 | 1019 | diffs[pointer][1][len(diffs[pointer + 1][1]):] + | ||
1723 | 1020 | diffs[pointer + 1][1]) | ||
1724 | 1021 | del diffs[pointer + 1] | ||
1725 | 1022 | changes = True | ||
1726 | 1023 | pointer += 1 | ||
1727 | 1024 | |||
1728 | 1025 | # If shifts were made, the diff needs reordering and another shift sweep. | ||
1729 | 1026 | if changes: | ||
1730 | 1027 | self.diff_cleanupMerge(diffs) | ||
1731 | 1028 | |||
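A small sketch of the merge pass on interleaved edits (illustrative only; `dmp` and DIFF_* assumed as above, expected value from the upstream tests):

    diffs = [(dmp.DIFF_DELETE, "a"), (dmp.DIFF_INSERT, "b"),
             (dmp.DIFF_DELETE, "c"), (dmp.DIFF_INSERT, "d"),
             (dmp.DIFF_EQUAL, "e"), (dmp.DIFF_EQUAL, "f")]
    dmp.diff_cleanupMerge(diffs)
    # Like operations are coalesced and adjacent equalities merged.
    assert diffs == [(dmp.DIFF_DELETE, "ac"),
                     (dmp.DIFF_INSERT, "bd"),
                     (dmp.DIFF_EQUAL, "ef")]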
1732 | 1029 | def diff_xIndex(self, diffs, loc): | ||
1733 | 1030 | """loc is a location in text1, compute and return the equivalent location | ||
1734 | 1031 | in text2. e.g. "The cat" vs "The big cat", 1->1, 5->8 | ||
1735 | 1032 | |||
1736 | 1033 | Args: | ||
1737 | 1034 | diffs: Array of diff tuples. | ||
1738 | 1035 | loc: Location within text1. | ||
1739 | 1036 | |||
1740 | 1037 | Returns: | ||
1741 | 1038 | Location within text2. | ||
1742 | 1039 | """ | ||
1743 | 1040 | chars1 = 0 | ||
1744 | 1041 | chars2 = 0 | ||
1745 | 1042 | last_chars1 = 0 | ||
1746 | 1043 | last_chars2 = 0 | ||
1747 | 1044 | for x in xrange(len(diffs)): | ||
1748 | 1045 | (op, text) = diffs[x] | ||
1749 | 1046 | if op != self.DIFF_INSERT: # Equality or deletion. | ||
1750 | 1047 | chars1 += len(text) | ||
1751 | 1048 | if op != self.DIFF_DELETE: # Equality or insertion. | ||
1752 | 1049 | chars2 += len(text) | ||
1753 | 1050 | if chars1 > loc: # Overshot the location. | ||
1754 | 1051 | break | ||
1755 | 1052 | last_chars1 = chars1 | ||
1756 | 1053 | last_chars2 = chars2 | ||
1757 | 1054 | |||
1758 | 1055 | if len(diffs) != x and diffs[x][0] == self.DIFF_DELETE: | ||
1759 | 1056 | # The location was deleted. | ||
1760 | 1057 | return last_chars2 | ||
1761 | 1058 | # Add the remaining character length. | ||
1762 | 1059 | return last_chars2 + (loc - last_chars1) | ||
1763 | 1060 | |||
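An illustrative mapping of a source location through a diff (same assumptions about `dmp` and the constants):

    diffs = [(dmp.DIFF_DELETE, "a"), (dmp.DIFF_INSERT, "1234"), (dmp.DIFF_EQUAL, "xyz")]
    # Character 2 of the source text "axyz" lands at character 5 of "1234xyz".
    assert dmp.diff_xIndex(diffs, 2) == 5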
1764 | 1061 | def diff_prettyHtml(self, diffs): | ||
1765 | 1062 | """Convert a diff array into a pretty HTML report. | ||
1766 | 1063 | |||
1767 | 1064 | Args: | ||
1768 | 1065 | diffs: Array of diff tuples. | ||
1769 | 1066 | |||
1770 | 1067 | Returns: | ||
1771 | 1068 | HTML representation. | ||
1772 | 1069 | """ | ||
1773 | 1070 | html = [] | ||
1774 | 1071 | for (op, data) in diffs: | ||
1775 | 1072 | text = (data.replace("&", "&amp;").replace("<", "&lt;") | ||
1776 | 1073 | .replace(">", "&gt;").replace("\n", "&para;<br>")) | ||
1777 | 1074 | if op == self.DIFF_INSERT: | ||
1778 | 1075 | html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text) | ||
1779 | 1076 | elif op == self.DIFF_DELETE: | ||
1780 | 1077 | html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text) | ||
1781 | 1078 | elif op == self.DIFF_EQUAL: | ||
1782 | 1079 | html.append("<span>%s</span>" % text) | ||
1783 | 1080 | return "".join(html) | ||
1784 | 1081 | |||
1785 | 1082 | def diff_text1(self, diffs): | ||
1786 | 1083 | """Compute and return the source text (all equalities and deletions). | ||
1787 | 1084 | |||
1788 | 1085 | Args: | ||
1789 | 1086 | diffs: Array of diff tuples. | ||
1790 | 1087 | |||
1791 | 1088 | Returns: | ||
1792 | 1089 | Source text. | ||
1793 | 1090 | """ | ||
1794 | 1091 | text = [] | ||
1795 | 1092 | for (op, data) in diffs: | ||
1796 | 1093 | if op != self.DIFF_INSERT: | ||
1797 | 1094 | text.append(data) | ||
1798 | 1095 | return "".join(text) | ||
1799 | 1096 | |||
1800 | 1097 | def diff_text2(self, diffs): | ||
1801 | 1098 | """Compute and return the destination text (all equalities and insertions). | ||
1802 | 1099 | |||
1803 | 1100 | Args: | ||
1804 | 1101 | diffs: Array of diff tuples. | ||
1805 | 1102 | |||
1806 | 1103 | Returns: | ||
1807 | 1104 | Destination text. | ||
1808 | 1105 | """ | ||
1809 | 1106 | text = [] | ||
1810 | 1107 | for (op, data) in diffs: | ||
1811 | 1108 | if op != self.DIFF_DELETE: | ||
1812 | 1109 | text.append(data) | ||
1813 | 1110 | return "".join(text) | ||
1814 | 1111 | |||
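The two projections side by side (illustrative only; `dmp` assumed as above):

    diffs = [(dmp.DIFF_EQUAL, "jump"), (dmp.DIFF_DELETE, "s"), (dmp.DIFF_INSERT, "ed")]
    assert dmp.diff_text1(diffs) == "jumps"    # equalities + deletions
    assert dmp.diff_text2(diffs) == "jumped"   # equalities + insertions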
1815 | 1112 | def diff_levenshtein(self, diffs): | ||
1816 | 1113 | """Compute the Levenshtein distance; the number of inserted, deleted or | ||
1817 | 1114 | substituted characters. | ||
1818 | 1115 | |||
1819 | 1116 | Args: | ||
1820 | 1117 | diffs: Array of diff tuples. | ||
1821 | 1118 | |||
1822 | 1119 | Returns: | ||
1823 | 1120 | Number of changes. | ||
1824 | 1121 | """ | ||
1825 | 1122 | levenshtein = 0 | ||
1826 | 1123 | insertions = 0 | ||
1827 | 1124 | deletions = 0 | ||
1828 | 1125 | for (op, data) in diffs: | ||
1829 | 1126 | if op == self.DIFF_INSERT: | ||
1830 | 1127 | insertions += len(data) | ||
1831 | 1128 | elif op == self.DIFF_DELETE: | ||
1832 | 1129 | deletions += len(data) | ||
1833 | 1130 | elif op == self.DIFF_EQUAL: | ||
1834 | 1131 | # A deletion and an insertion is one substitution. | ||
1835 | 1132 | levenshtein += max(insertions, deletions) | ||
1836 | 1133 | insertions = 0 | ||
1837 | 1134 | deletions = 0 | ||
1838 | 1135 | levenshtein += max(insertions, deletions) | ||
1839 | 1136 | return levenshtein | ||
1840 | 1137 | |||
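A quick illustration of the distance calculation (same assumptions as the earlier sketches):

    diffs = [(dmp.DIFF_DELETE, "abc"), (dmp.DIFF_INSERT, "1234"), (dmp.DIFF_EQUAL, "xyz")]
    # Three deletions and four insertions over the same span count as four changes.
    assert dmp.diff_levenshtein(diffs) == 4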
1841 | 1138 | def diff_toDelta(self, diffs): | ||
1842 | 1139 | """Crush the diff into an encoded string which describes the operations | ||
1843 | 1140 | required to transform text1 into text2. | ||
1844 | 1141 | E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'. | ||
1845 | 1142 | Operations are tab-separated. Inserted text is escaped using %xx notation. | ||
1846 | 1143 | |||
1847 | 1144 | Args: | ||
1848 | 1145 | diffs: Array of diff tuples. | ||
1849 | 1146 | |||
1850 | 1147 | Returns: | ||
1851 | 1148 | Delta text. | ||
1852 | 1149 | """ | ||
1853 | 1150 | text = [] | ||
1854 | 1151 | for (op, data) in diffs: | ||
1855 | 1152 | if op == self.DIFF_INSERT: | ||
1856 | 1153 | # High ascii will raise UnicodeDecodeError. Use Unicode instead. | ||
1857 | 1154 | data = data.encode("utf-8") | ||
1858 | 1155 | text.append("+" + urllib.quote(data, "!~*'();/?:@&=+$,# ")) | ||
1859 | 1156 | elif op == self.DIFF_DELETE: | ||
1860 | 1157 | text.append("-%d" % len(data)) | ||
1861 | 1158 | elif op == self.DIFF_EQUAL: | ||
1862 | 1159 | text.append("=%d" % len(data)) | ||
1863 | 1160 | return "\t".join(text) | ||
1864 | 1161 | |||
1865 | 1162 | def diff_fromDelta(self, text1, delta): | ||
1866 | 1163 | """Given the original text1, and an encoded string which describes the | ||
1867 | 1164 | operations required to transform text1 into text2, compute the full diff. | ||
1868 | 1165 | |||
1869 | 1166 | Args: | ||
1870 | 1167 | text1: Source string for the diff. | ||
1871 | 1168 | delta: Delta text. | ||
1872 | 1169 | |||
1873 | 1170 | Returns: | ||
1874 | 1171 | Array of diff tuples. | ||
1875 | 1172 | |||
1876 | 1173 | Raises: | ||
1877 | 1174 | ValueError: If invalid input. | ||
1878 | 1175 | """ | ||
1879 | 1176 | if type(delta) == unicode: | ||
1880 | 1177 | # Deltas should be composed of a subset of ascii chars, Unicode not | ||
1881 | 1178 | # required. If this encode raises UnicodeEncodeError, delta is invalid. | ||
1882 | 1179 | delta = delta.encode("ascii") | ||
1883 | 1180 | diffs = [] | ||
1884 | 1181 | pointer = 0 # Cursor in text1 | ||
1885 | 1182 | tokens = delta.split("\t") | ||
1886 | 1183 | for token in tokens: | ||
1887 | 1184 | if token == "": | ||
1888 | 1185 | # Blank tokens are ok (from a trailing \t). | ||
1889 | 1186 | continue | ||
1890 | 1187 | # Each token begins with a one character parameter which specifies the | ||
1891 | 1188 | # operation of this token (delete, insert, equality). | ||
1892 | 1189 | param = token[1:] | ||
1893 | 1190 | if token[0] == "+": | ||
1894 | 1191 | param = urllib.unquote(param).decode("utf-8") | ||
1895 | 1192 | diffs.append((self.DIFF_INSERT, param)) | ||
1896 | 1193 | elif token[0] == "-" or token[0] == "=": | ||
1897 | 1194 | try: | ||
1898 | 1195 | n = int(param) | ||
1899 | 1196 | except ValueError: | ||
1900 | 1197 | raise ValueError("Invalid number in diff_fromDelta: " + param) | ||
1901 | 1198 | if n < 0: | ||
1902 | 1199 | raise ValueError("Negative number in diff_fromDelta: " + param) | ||
1903 | 1200 | text = text1[pointer : pointer + n] | ||
1904 | 1201 | pointer += n | ||
1905 | 1202 | if token[0] == "=": | ||
1906 | 1203 | diffs.append((self.DIFF_EQUAL, text)) | ||
1907 | 1204 | else: | ||
1908 | 1205 | diffs.append((self.DIFF_DELETE, text)) | ||
1909 | 1206 | else: | ||
1910 | 1207 | # Anything else is an error. | ||
1911 | 1208 | raise ValueError("Invalid diff operation in diff_fromDelta: " + | ||
1912 | 1209 | token[0]) | ||
1913 | 1210 | if pointer != len(text1): | ||
1914 | 1211 | raise ValueError( | ||
1915 | 1212 | "Delta length (%d) does not equal source text length (%d)." % | ||
1916 | 1213 | (pointer, len(text1))) | ||
1917 | 1214 | return diffs | ||
1918 | 1215 | |||
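The delta encoding round-trips against the source text; a minimal sketch (illustrative only, `dmp` assumed as above):

    diffs = [(dmp.DIFF_EQUAL, "jump"), (dmp.DIFF_DELETE, "s"), (dmp.DIFF_INSERT, "ed")]
    delta = dmp.diff_toDelta(diffs)
    assert delta == "=4\t-1\t+ed"
    # Re-expanding the delta against the source text recovers the diff.
    assert dmp.diff_fromDelta("jumps", delta) == diffs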
1919 | 1216 | # MATCH FUNCTIONS | ||
1920 | 1217 | |||
1921 | 1218 | def match_main(self, text, pattern, loc): | ||
1922 | 1219 | """Locate the best instance of 'pattern' in 'text' near 'loc'. | ||
1923 | 1220 | |||
1924 | 1221 | Args: | ||
1925 | 1222 | text: The text to search. | ||
1926 | 1223 | pattern: The pattern to search for. | ||
1927 | 1224 | loc: The location to search around. | ||
1928 | 1225 | |||
1929 | 1226 | Returns: | ||
1930 | 1227 | Best match index or -1. | ||
1931 | 1228 | """ | ||
1932 | 1229 | # Check for null inputs. | ||
1933 | 1230 | if text is None or pattern is None: | ||
1934 | 1231 | raise ValueError("Null inputs. (match_main)") | ||
1935 | 1232 | |||
1936 | 1233 | loc = max(0, min(loc, len(text))) | ||
1937 | 1234 | if text == pattern: | ||
1938 | 1235 | # Shortcut (potentially not guaranteed by the algorithm) | ||
1939 | 1236 | return 0 | ||
1940 | 1237 | elif not text: | ||
1941 | 1238 | # Nothing to match. | ||
1942 | 1239 | return -1 | ||
1943 | 1240 | elif text[loc:loc + len(pattern)] == pattern: | ||
1944 | 1241 | # Perfect match at the perfect spot! (Includes case of null pattern) | ||
1945 | 1242 | return loc | ||
1946 | 1243 | else: | ||
1947 | 1244 | # Do a fuzzy compare. | ||
1948 | 1245 | match = self.match_bitap(text, pattern, loc) | ||
1949 | 1246 | return match | ||
1950 | 1247 | |||
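Two simple cases of the dispatcher (illustrative only; `dmp` assumed as above). Anything that is neither empty nor an exact hit at `loc` falls through to the fuzzy match_bitap search below:

    assert dmp.match_main("abcdef", "de", 3) == 3   # exact match at the expected spot
    assert dmp.match_main("", "abcdef", 1) == -1    # nothing to search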
1951 | 1248 | def match_bitap(self, text, pattern, loc): | ||
1952 | 1249 | """Locate the best instance of 'pattern' in 'text' near 'loc' using the | ||
1953 | 1250 | Bitap algorithm. | ||
1954 | 1251 | |||
1955 | 1252 | Args: | ||
1956 | 1253 | text: The text to search. | ||
1957 | 1254 | pattern: The pattern to search for. | ||
1958 | 1255 | loc: The location to search around. | ||
1959 | 1256 | |||
1960 | 1257 | Returns: | ||
1961 | 1258 | Best match index or -1. | ||
1962 | 1259 | """ | ||
1963 | 1260 | # Python doesn't have a maxint limit, so ignore this check. | ||
1964 | 1261 | #if self.Match_MaxBits != 0 and len(pattern) > self.Match_MaxBits: | ||
1965 | 1262 | # raise ValueError("Pattern too long for this application.") | ||
1966 | 1263 | |||
1967 | 1264 | # Initialise the alphabet. | ||
1968 | 1265 | s = self.match_alphabet(pattern) | ||
1969 | 1266 | |||
1970 | 1267 | def match_bitapScore(e, x): | ||
1971 | 1268 | """Compute and return the score for a match with e errors and x location. | ||
1972 | 1269 | Accesses loc and pattern through being a closure. | ||
1973 | 1270 | |||
1974 | 1271 | Args: | ||
1975 | 1272 | e: Number of errors in match. | ||
1976 | 1273 | x: Location of match. | ||
1977 | 1274 | |||
1978 | 1275 | Returns: | ||
1979 | 1276 | Overall score for match (0.0 = good, 1.0 = bad). | ||
1980 | 1277 | """ | ||
1981 | 1278 | accuracy = float(e) / len(pattern) | ||
1982 | 1279 | proximity = abs(loc - x) | ||
1983 | 1280 | if not self.Match_Distance: | ||
1984 | 1281 | # Dodge divide by zero error. | ||
1985 | 1282 | return proximity and 1.0 or accuracy | ||
1986 | 1283 | return accuracy + (proximity / float(self.Match_Distance)) | ||
1987 | 1284 | |||
1988 | 1285 | # Highest score beyond which we give up. | ||
1989 | 1286 | score_threshold = self.Match_Threshold | ||
1990 | 1287 | # Is there a nearby exact match? (speedup) | ||
1991 | 1288 | best_loc = text.find(pattern, loc) | ||
1992 | 1289 | if best_loc != -1: | ||
1993 | 1290 | score_threshold = min(match_bitapScore(0, best_loc), score_threshold) | ||
1994 | 1291 | # What about in the other direction? (speedup) | ||
1995 | 1292 | best_loc = text.rfind(pattern, loc + len(pattern)) | ||
1996 | 1293 | if best_loc != -1: | ||
1997 | 1294 | score_threshold = min(match_bitapScore(0, best_loc), score_threshold) | ||
1998 | 1295 | |||
1999 | 1296 | # Initialise the bit arrays. | ||
2000 | 1297 | matchmask = 1 << (len(pattern) - 1) | ||
2001 | 1298 | best_loc = -1 | ||
2002 | 1299 | |||
2003 | 1300 | bin_max = len(pattern) + len(text) | ||
2004 | 1301 | # Empty initialization added to appease pychecker. | ||
2005 | 1302 | last_rd = None | ||
2006 | 1303 | for d in xrange(len(pattern)): | ||
2007 | 1304 | # Scan for the best match each iteration allows for one more error. | ||
2008 | 1305 | # Run a binary search to determine how far from 'loc' we can stray at | ||
2009 | 1306 | # this error level. | ||
2010 | 1307 | bin_min = 0 | ||
2011 | 1308 | bin_mid = bin_max | ||
2012 | 1309 | while bin_min < bin_mid: | ||
2013 | 1310 | if match_bitapScore(d, loc + bin_mid) <= score_threshold: | ||
2014 | 1311 | bin_min = bin_mid | ||
2015 | 1312 | else: | ||
2016 | 1313 | bin_max = bin_mid | ||
2017 | 1314 | bin_mid = (bin_max - bin_min) // 2 + bin_min | ||
2018 | 1315 | |||
2019 | 1316 | # Use the result from this iteration as the maximum for the next. | ||
2020 | 1317 | bin_max = bin_mid | ||
2021 | 1318 | start = max(1, loc - bin_mid + 1) | ||
2022 | 1319 | finish = min(loc + bin_mid, len(text)) + len(pattern) | ||
2023 | 1320 | |||
2024 | 1321 | rd = [0] * (finish + 2) | ||
2025 | 1322 | rd[finish + 1] = (1 << d) - 1 | ||
2026 | 1323 | for j in xrange(finish, start - 1, -1): | ||
2027 | 1324 | if len(text) <= j - 1: | ||
2028 | 1325 | # Out of range. | ||
2029 | 1326 | charMatch = 0 | ||
2030 | 1327 | else: | ||
2031 | 1328 | charMatch = s.get(text[j - 1], 0) | ||
2032 | 1329 | if d == 0: # First pass: exact match. | ||
2033 | 1330 | rd[j] = ((rd[j + 1] << 1) | 1) & charMatch | ||
2034 | 1331 | else: # Subsequent passes: fuzzy match. | ||
2035 | 1332 | rd[j] = (((rd[j + 1] << 1) | 1) & charMatch) | ( | ||
2036 | 1333 | ((last_rd[j + 1] | last_rd[j]) << 1) | 1) | last_rd[j + 1] | ||
2037 | 1334 | if rd[j] & matchmask: | ||
2038 | 1335 | score = match_bitapScore(d, j - 1) | ||
2039 | 1336 | # This match will almost certainly be better than any existing match. | ||
2040 | 1337 | # But check anyway. | ||
2041 | 1338 | if score <= score_threshold: | ||
2042 | 1339 | # Told you so. | ||
2043 | 1340 | score_threshold = score | ||
2044 | 1341 | best_loc = j - 1 | ||
2045 | 1342 | if best_loc > loc: | ||
2046 | 1343 | # When passing loc, don't exceed our current distance from loc. | ||
2047 | 1344 | start = max(1, 2 * loc - best_loc) | ||
2048 | 1345 | else: | ||
2049 | 1346 | # Already passed loc, downhill from here on in. | ||
2050 | 1347 | break | ||
2051 | 1348 | # No hope for a (better) match at greater error levels. | ||
2052 | 1349 | if match_bitapScore(d + 1, loc) > score_threshold: | ||
2053 | 1350 | break | ||
2054 | 1351 | last_rd = rd | ||
2055 | 1352 | return best_loc | ||
2056 | 1353 | |||
2057 | 1354 | def match_alphabet(self, pattern): | ||
2058 | 1355 | """Initialise the alphabet for the Bitap algorithm. | ||
2059 | 1356 | |||
2060 | 1357 | Args: | ||
2061 | 1358 | pattern: The text to encode. | ||
2062 | 1359 | |||
2063 | 1360 | Returns: | ||
2064 | 1361 | Hash of character locations. | ||
2065 | 1362 | """ | ||
2066 | 1363 | s = {} | ||
2067 | 1364 | for char in pattern: | ||
2068 | 1365 | s[char] = 0 | ||
2069 | 1366 | for i in xrange(len(pattern)): | ||
2070 | 1367 | s[pattern[i]] |= 1 << (len(pattern) - i - 1) | ||
2071 | 1368 | return s | ||
2072 | 1369 | |||
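A tiny illustration of the alphabet table (same assumptions about `dmp`):

    # One bitmask per distinct character; bit positions count back from the end of the pattern.
    assert dmp.match_alphabet("abc") == {"a": 4, "b": 2, "c": 1}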
2073 | 1370 | # PATCH FUNCTIONS | ||
2074 | 1371 | |||
2075 | 1372 | def patch_addContext(self, patch, text): | ||
2076 | 1373 | """Increase the context until it is unique, | ||
2077 | 1374 | but don't let the pattern expand beyond Match_MaxBits. | ||
2078 | 1375 | |||
2079 | 1376 | Args: | ||
2080 | 1377 | patch: The patch to grow. | ||
2081 | 1378 | text: Source text. | ||
2082 | 1379 | """ | ||
2083 | 1380 | if len(text) == 0: | ||
2084 | 1381 | return | ||
2085 | 1382 | pattern = text[patch.start2 : patch.start2 + patch.length1] | ||
2086 | 1383 | padding = 0 | ||
2087 | 1384 | |||
2088 | 1385 | # Look for the first and last matches of pattern in text. If two different | ||
2089 | 1386 | # matches are found, increase the pattern length. | ||
2090 | 1387 | while (text.find(pattern) != text.rfind(pattern) and (self.Match_MaxBits == | ||
2091 | 1388 | 0 or len(pattern) < self.Match_MaxBits - self.Patch_Margin - | ||
2092 | 1389 | self.Patch_Margin)): | ||
2093 | 1390 | padding += self.Patch_Margin | ||
2094 | 1391 | pattern = text[max(0, patch.start2 - padding) : | ||
2095 | 1392 | patch.start2 + patch.length1 + padding] | ||
2096 | 1393 | # Add one chunk for good luck. | ||
2097 | 1394 | padding += self.Patch_Margin | ||
2098 | 1395 | |||
2099 | 1396 | # Add the prefix. | ||
2100 | 1397 | prefix = text[max(0, patch.start2 - padding) : patch.start2] | ||
2101 | 1398 | if prefix: | ||
2102 | 1399 | patch.diffs[:0] = [(self.DIFF_EQUAL, prefix)] | ||
2103 | 1400 | # Add the suffix. | ||
2104 | 1401 | suffix = text[patch.start2 + patch.length1 : | ||
2105 | 1402 | patch.start2 + patch.length1 + padding] | ||
2106 | 1403 | if suffix: | ||
2107 | 1404 | patch.diffs.append((self.DIFF_EQUAL, suffix)) | ||
2108 | 1405 | |||
2109 | 1406 | # Roll back the start points. | ||
2110 | 1407 | patch.start1 -= len(prefix) | ||
2111 | 1408 | patch.start2 -= len(prefix) | ||
2112 | 1409 | # Extend lengths. | ||
2113 | 1410 | patch.length1 += len(prefix) + len(suffix) | ||
2114 | 1411 | patch.length2 += len(prefix) + len(suffix) | ||
2115 | 1412 | |||
2116 | 1413 | def patch_make(self, a, b=None, c=None): | ||
2117 | 1414 | """Compute a list of patches to turn text1 into text2. | ||
2118 | 1415 | Use diffs if provided, otherwise compute it ourselves. | ||
2119 | 1416 | There are four ways to call this function, depending on what data is | ||
2120 | 1417 | available to the caller: | ||
2121 | 1418 | Method 1: | ||
2122 | 1419 | a = text1, b = text2 | ||
2123 | 1420 | Method 2: | ||
2124 | 1421 | a = diffs | ||
2125 | 1422 | Method 3 (optimal): | ||
2126 | 1423 | a = text1, b = diffs | ||
2127 | 1424 | Method 4 (deprecated, use method 3): | ||
2128 | 1425 | a = text1, b = text2, c = diffs | ||
2129 | 1426 | |||
2130 | 1427 | Args: | ||
2131 | 1428 | a: text1 (methods 1,3,4) or Array of diff tuples for text1 to | ||
2132 | 1429 | text2 (method 2). | ||
2133 | 1430 | b: text2 (methods 1,4) or Array of diff tuples for text1 to | ||
2134 | 1431 | text2 (method 3) or undefined (method 2). | ||
2135 | 1432 | c: Array of diff tuples for text1 to text2 (method 4) or | ||
2136 | 1433 | undefined (methods 1,2,3). | ||
2137 | 1434 | |||
2138 | 1435 | Returns: | ||
2139 | 1436 | Array of Patch objects. | ||
2140 | 1437 | """ | ||
2141 | 1438 | text1 = None | ||
2142 | 1439 | diffs = None | ||
2143 | 1440 | # Note that texts may arrive as 'str' or 'unicode'. | ||
2144 | 1441 | if isinstance(a, basestring) and isinstance(b, basestring) and c is None: | ||
2145 | 1442 | # Method 1: text1, text2 | ||
2146 | 1443 | # Compute diffs from text1 and text2. | ||
2147 | 1444 | text1 = a | ||
2148 | 1445 | diffs = self.diff_main(text1, b, True) | ||
2149 | 1446 | if len(diffs) > 2: | ||
2150 | 1447 | self.diff_cleanupSemantic(diffs) | ||
2151 | 1448 | self.diff_cleanupEfficiency(diffs) | ||
2152 | 1449 | elif isinstance(a, list) and b is None and c is None: | ||
2153 | 1450 | # Method 2: diffs | ||
2154 | 1451 | # Compute text1 from diffs. | ||
2155 | 1452 | diffs = a | ||
2156 | 1453 | text1 = self.diff_text1(diffs) | ||
2157 | 1454 | elif isinstance(a, basestring) and isinstance(b, list) and c is None: | ||
2158 | 1455 | # Method 3: text1, diffs | ||
2159 | 1456 | text1 = a | ||
2160 | 1457 | diffs = b | ||
2161 | 1458 | elif (isinstance(a, basestring) and isinstance(b, basestring) and | ||
2162 | 1459 | isinstance(c, list)): | ||
2163 | 1460 | # Method 4: text1, text2, diffs | ||
2164 | 1461 | # text2 is not used. | ||
2165 | 1462 | text1 = a | ||
2166 | 1463 | diffs = c | ||
2167 | 1464 | else: | ||
2168 | 1465 | raise ValueError("Unknown call format to patch_make.") | ||
2169 | 1466 | |||
2170 | 1467 | if not diffs: | ||
2171 | 1468 | return [] # Get rid of the None case. | ||
2172 | 1469 | patches = [] | ||
2173 | 1470 | patch = patch_obj() | ||
2174 | 1471 | char_count1 = 0 # Number of characters into the text1 string. | ||
2175 | 1472 | char_count2 = 0 # Number of characters into the text2 string. | ||
2176 | 1473 | prepatch_text = text1 # Recreate the patches to determine context info. | ||
2177 | 1474 | postpatch_text = text1 | ||
2178 | 1475 | for x in xrange(len(diffs)): | ||
2179 | 1476 | (diff_type, diff_text) = diffs[x] | ||
2180 | 1477 | if len(patch.diffs) == 0 and diff_type != self.DIFF_EQUAL: | ||
2181 | 1478 | # A new patch starts here. | ||
2182 | 1479 | patch.start1 = char_count1 | ||
2183 | 1480 | patch.start2 = char_count2 | ||
2184 | 1481 | if diff_type == self.DIFF_INSERT: | ||
2185 | 1482 | # Insertion | ||
2186 | 1483 | patch.diffs.append(diffs[x]) | ||
2187 | 1484 | patch.length2 += len(diff_text) | ||
2188 | 1485 | postpatch_text = (postpatch_text[:char_count2] + diff_text + | ||
2189 | 1486 | postpatch_text[char_count2:]) | ||
2190 | 1487 | elif diff_type == self.DIFF_DELETE: | ||
2191 | 1488 | # Deletion. | ||
2192 | 1489 | patch.length1 += len(diff_text) | ||
2193 | 1490 | patch.diffs.append(diffs[x]) | ||
2194 | 1491 | postpatch_text = (postpatch_text[:char_count2] + | ||
2195 | 1492 | postpatch_text[char_count2 + len(diff_text):]) | ||
2196 | 1493 | elif (diff_type == self.DIFF_EQUAL and | ||
2197 | 1494 | len(diff_text) <= 2 * self.Patch_Margin and | ||
2198 | 1495 | len(patch.diffs) != 0 and len(diffs) != x + 1): | ||
2199 | 1496 | # Small equality inside a patch. | ||
2200 | 1497 | patch.diffs.append(diffs[x]) | ||
2201 | 1498 | patch.length1 += len(diff_text) | ||
2202 | 1499 | patch.length2 += len(diff_text) | ||
2203 | 1500 | |||
2204 | 1501 | if (diff_type == self.DIFF_EQUAL and | ||
2205 | 1502 | len(diff_text) >= 2 * self.Patch_Margin): | ||
2206 | 1503 | # Time for a new patch. | ||
2207 | 1504 | if len(patch.diffs) != 0: | ||
2208 | 1505 | self.patch_addContext(patch, prepatch_text) | ||
2209 | 1506 | patches.append(patch) | ||
2210 | 1507 | patch = patch_obj() | ||
2211 | 1508 | # Unlike Unidiff, our patch lists have a rolling context. | ||
2212 | 1509 | # http://code.google.com/p/google-diff-match-patch/wiki/Unidiff | ||
2213 | 1510 | # Update prepatch text & pos to reflect the application of the | ||
2214 | 1511 | # just completed patch. | ||
2215 | 1512 | prepatch_text = postpatch_text | ||
2216 | 1513 | char_count1 = char_count2 | ||
2217 | 1514 | |||
2218 | 1515 | # Update the current character count. | ||
2219 | 1516 | if diff_type != self.DIFF_INSERT: | ||
2220 | 1517 | char_count1 += len(diff_text) | ||
2221 | 1518 | if diff_type != self.DIFF_DELETE: | ||
2222 | 1519 | char_count2 += len(diff_text) | ||
2223 | 1520 | |||
2224 | 1521 | # Pick up the leftover patch if not empty. | ||
2225 | 1522 | if len(patch.diffs) != 0: | ||
2226 | 1523 | self.patch_addContext(patch, prepatch_text) | ||
2227 | 1524 | patches.append(patch) | ||
2228 | 1525 | return patches | ||
2229 | 1526 | |||
2230 | 1527 | def patch_deepCopy(self, patches): | ||
2231 | 1528 | """Given an array of patches, return another array that is identical. | ||
2232 | 1529 | |||
2233 | 1530 | Args: | ||
2234 | 1531 | patches: Array of Patch objects. | ||
2235 | 1532 | |||
2236 | 1533 | Returns: | ||
2237 | 1534 | Array of Patch objects. | ||
2238 | 1535 | """ | ||
2239 | 1536 | patchesCopy = [] | ||
2240 | 1537 | for patch in patches: | ||
2241 | 1538 | patchCopy = patch_obj() | ||
2242 | 1539 | # No need to deep copy the tuples since they are immutable. | ||
2243 | 1540 | patchCopy.diffs = patch.diffs[:] | ||
2244 | 1541 | patchCopy.start1 = patch.start1 | ||
2245 | 1542 | patchCopy.start2 = patch.start2 | ||
2246 | 1543 | patchCopy.length1 = patch.length1 | ||
2247 | 1544 | patchCopy.length2 = patch.length2 | ||
2248 | 1545 | patchesCopy.append(patchCopy) | ||
2249 | 1546 | return patchesCopy | ||
2250 | 1547 | |||
2251 | 1548 | def patch_apply(self, patches, text): | ||
2252 | 1549 | """Merge a set of patches onto the text. Return a patched text, as well | ||
2253 | 1550 | as a list of true/false values indicating which patches were applied. | ||
2254 | 1551 | |||
2255 | 1552 | Args: | ||
2256 | 1553 | patches: Array of Patch objects. | ||
2257 | 1554 | text: Old text. | ||
2258 | 1555 | |||
2259 | 1556 | Returns: | ||
2260 | 1557 | Two element Array, containing the new text and an array of boolean values. | ||
2261 | 1558 | """ | ||
2262 | 1559 | if not patches: | ||
2263 | 1560 | return (text, []) | ||
2264 | 1561 | |||
2265 | 1562 | # Deep copy the patches so that no changes are made to originals. | ||
2266 | 1563 | patches = self.patch_deepCopy(patches) | ||
2267 | 1564 | |||
2268 | 1565 | nullPadding = self.patch_addPadding(patches) | ||
2269 | 1566 | text = nullPadding + text + nullPadding | ||
2270 | 1567 | self.patch_splitMax(patches) | ||
2271 | 1568 | |||
2272 | 1569 | # delta keeps track of the offset between the expected and actual location | ||
2273 | 1570 | # of the previous patch. If there are patches expected at positions 10 and | ||
2274 | 1571 | # 20, but the first patch was found at 12, delta is 2 and the second patch | ||
2275 | 1572 | # has an effective expected position of 22. | ||
2276 | 1573 | delta = 0 | ||
2277 | 1574 | results = [] | ||
2278 | 1575 | for patch in patches: | ||
2279 | 1576 | expected_loc = patch.start2 + delta | ||
2280 | 1577 | text1 = self.diff_text1(patch.diffs) | ||
2281 | 1578 | end_loc = -1 | ||
2282 | 1579 | if len(text1) > self.Match_MaxBits: | ||
2283 | 1580 | # patch_splitMax will only provide an oversized pattern in the case of | ||
2284 | 1581 | # a monster delete. | ||
2285 | 1582 | start_loc = self.match_main(text, text1[:self.Match_MaxBits], | ||
2286 | 1583 | expected_loc) | ||
2287 | 1584 | if start_loc != -1: | ||
2288 | 1585 | end_loc = self.match_main(text, text1[-self.Match_MaxBits:], | ||
2289 | 1586 | expected_loc + len(text1) - self.Match_MaxBits) | ||
2290 | 1587 | if end_loc == -1 or start_loc >= end_loc: | ||
2291 | 1588 | # Can't find valid trailing context. Drop this patch. | ||
2292 | 1589 | start_loc = -1 | ||
2293 | 1590 | else: | ||
2294 | 1591 | start_loc = self.match_main(text, text1, expected_loc) | ||
2295 | 1592 | if start_loc == -1: | ||
2296 | 1593 | # No match found. :( | ||
2297 | 1594 | results.append(False) | ||
2298 | 1595 | # Subtract the delta for this failed patch from subsequent patches. | ||
2299 | 1596 | delta -= patch.length2 - patch.length1 | ||
2300 | 1597 | else: | ||
2301 | 1598 | # Found a match. :) | ||
2302 | 1599 | results.append(True) | ||
2303 | 1600 | delta = start_loc - expected_loc | ||
2304 | 1601 | if end_loc == -1: | ||
2305 | 1602 | text2 = text[start_loc : start_loc + len(text1)] | ||
2306 | 1603 | else: | ||
2307 | 1604 | text2 = text[start_loc : end_loc + self.Match_MaxBits] | ||
2308 | 1605 | if text1 == text2: | ||
2309 | 1606 | # Perfect match, just shove the replacement text in. | ||
2310 | 1607 | text = (text[:start_loc] + self.diff_text2(patch.diffs) + | ||
2311 | 1608 | text[start_loc + len(text1):]) | ||
2312 | 1609 | else: | ||
2313 | 1610 | # Imperfect match. | ||
2314 | 1611 | # Run a diff to get a framework of equivalent indices. | ||
2315 | 1612 | diffs = self.diff_main(text1, text2, False) | ||
2316 | 1613 | if (len(text1) > self.Match_MaxBits and | ||
2317 | 1614 | self.diff_levenshtein(diffs) / float(len(text1)) > | ||
2318 | 1615 | self.Patch_DeleteThreshold): | ||
2319 | 1616 | # The end points match, but the content is unacceptably bad. | ||
2320 | 1617 | results[-1] = False | ||
2321 | 1618 | else: | ||
2322 | 1619 | self.diff_cleanupSemanticLossless(diffs) | ||
2323 | 1620 | index1 = 0 | ||
2324 | 1621 | for (op, data) in patch.diffs: | ||
2325 | 1622 | if op != self.DIFF_EQUAL: | ||
2326 | 1623 | index2 = self.diff_xIndex(diffs, index1) | ||
2327 | 1624 | if op == self.DIFF_INSERT: # Insertion | ||
2328 | 1625 | text = text[:start_loc + index2] + data + text[start_loc + | ||
2329 | 1626 | index2:] | ||
2330 | 1627 | elif op == self.DIFF_DELETE: # Deletion | ||
2331 | 1628 | text = text[:start_loc + index2] + text[start_loc + | ||
2332 | 1629 | self.diff_xIndex(diffs, index1 + len(data)):] | ||
2333 | 1630 | if op != self.DIFF_DELETE: | ||
2334 | 1631 | index1 += len(data) | ||
2335 | 1632 | # Strip the padding off. | ||
2336 | 1633 | text = text[len(nullPadding):-len(nullPadding)] | ||
2337 | 1634 | return (text, results) | ||
2338 | 1635 | |||
2339 | 1636 | def patch_addPadding(self, patches): | ||
2340 | 1637 | """Add some padding on text start and end so that edges can match | ||
2341 | 1638 | something. Intended to be called only from within patch_apply. | ||
2342 | 1639 | |||
2343 | 1640 | Args: | ||
2344 | 1641 | patches: Array of Patch objects. | ||
2345 | 1642 | |||
2346 | 1643 | Returns: | ||
2347 | 1644 | The padding string added to each side. | ||
2348 | 1645 | """ | ||
2349 | 1646 | paddingLength = self.Patch_Margin | ||
2350 | 1647 | nullPadding = "" | ||
2351 | 1648 | for x in xrange(1, paddingLength + 1): | ||
2352 | 1649 | nullPadding += chr(x) | ||
2353 | 1650 | |||
2354 | 1651 | # Bump all the patches forward. | ||
2355 | 1652 | for patch in patches: | ||
2356 | 1653 | patch.start1 += paddingLength | ||
2357 | 1654 | patch.start2 += paddingLength | ||
2358 | 1655 | |||
2359 | 1656 | # Add some padding on start of first diff. | ||
2360 | 1657 | patch = patches[0] | ||
2361 | 1658 | diffs = patch.diffs | ||
2362 | 1659 | if not diffs or diffs[0][0] != self.DIFF_EQUAL: | ||
2363 | 1660 | # Add nullPadding equality. | ||
2364 | 1661 | diffs.insert(0, (self.DIFF_EQUAL, nullPadding)) | ||
2365 | 1662 | patch.start1 -= paddingLength # Should be 0. | ||
2366 | 1663 | patch.start2 -= paddingLength # Should be 0. | ||
2367 | 1664 | patch.length1 += paddingLength | ||
2368 | 1665 | patch.length2 += paddingLength | ||
2369 | 1666 | elif paddingLength > len(diffs[0][1]): | ||
2370 | 1667 | # Grow first equality. | ||
2371 | 1668 | extraLength = paddingLength - len(diffs[0][1]) | ||
2372 | 1669 | newText = nullPadding[len(diffs[0][1]):] + diffs[0][1] | ||
2373 | 1670 | diffs[0] = (diffs[0][0], newText) | ||
2374 | 1671 | patch.start1 -= extraLength | ||
2375 | 1672 | patch.start2 -= extraLength | ||
2376 | 1673 | patch.length1 += extraLength | ||
2377 | 1674 | patch.length2 += extraLength | ||
2378 | 1675 | |||
2379 | 1676 | # Add some padding on end of last diff. | ||
2380 | 1677 | patch = patches[-1] | ||
2381 | 1678 | diffs = patch.diffs | ||
2382 | 1679 | if not diffs or diffs[-1][0] != self.DIFF_EQUAL: | ||
2383 | 1680 | # Add nullPadding equality. | ||
2384 | 1681 | diffs.append((self.DIFF_EQUAL, nullPadding)) | ||
2385 | 1682 | patch.length1 += paddingLength | ||
2386 | 1683 | patch.length2 += paddingLength | ||
2387 | 1684 | elif paddingLength > len(diffs[-1][1]): | ||
2388 | 1685 | # Grow last equality. | ||
2389 | 1686 | extraLength = paddingLength - len(diffs[-1][1]) | ||
2390 | 1687 | newText = diffs[-1][1] + nullPadding[:extraLength] | ||
2391 | 1688 | diffs[-1] = (diffs[-1][0], newText) | ||
2392 | 1689 | patch.length1 += extraLength | ||
2393 | 1690 | patch.length2 += extraLength | ||
2394 | 1691 | |||
2395 | 1692 | return nullPadding | ||
2396 | 1693 | |||
2397 | 1694 | def patch_splitMax(self, patches): | ||
2398 | 1695 | """Look through the patches and break up any which are longer than the | ||
2399 | 1696 | maximum limit of the match algorithm. | ||
2400 | 1697 | Intended to be called only from within patch_apply. | ||
2401 | 1698 | |||
2402 | 1699 | Args: | ||
2403 | 1700 | patches: Array of Patch objects. | ||
2404 | 1701 | """ | ||
2405 | 1702 | patch_size = self.Match_MaxBits | ||
2406 | 1703 | if patch_size == 0: | ||
2407 | 1704 | # Python has the option of not splitting strings due to its ability | ||
2408 | 1705 | # to handle integers of arbitrary precision. | ||
2409 | 1706 | return | ||
2410 | 1707 | for x in xrange(len(patches)): | ||
2411 | 1708 | if patches[x].length1 <= patch_size: | ||
2412 | 1709 | continue | ||
2413 | 1710 | bigpatch = patches[x] | ||
2414 | 1711 | # Remove the big old patch. | ||
2415 | 1712 | del patches[x] | ||
2416 | 1713 | x -= 1 | ||
2417 | 1714 | start1 = bigpatch.start1 | ||
2418 | 1715 | start2 = bigpatch.start2 | ||
2419 | 1716 | precontext = '' | ||
2420 | 1717 | while len(bigpatch.diffs) != 0: | ||
2421 | 1718 | # Create one of several smaller patches. | ||
2422 | 1719 | patch = patch_obj() | ||
2423 | 1720 | empty = True | ||
2424 | 1721 | patch.start1 = start1 - len(precontext) | ||
2425 | 1722 | patch.start2 = start2 - len(precontext) | ||
2426 | 1723 | if precontext: | ||
2427 | 1724 | patch.length1 = patch.length2 = len(precontext) | ||
2428 | 1725 | patch.diffs.append((self.DIFF_EQUAL, precontext)) | ||
2429 | 1726 | |||
2430 | 1727 | while (len(bigpatch.diffs) != 0 and | ||
2431 | 1728 | patch.length1 < patch_size - self.Patch_Margin): | ||
2432 | 1729 | (diff_type, diff_text) = bigpatch.diffs[0] | ||
2433 | 1730 | if diff_type == self.DIFF_INSERT: | ||
2434 | 1731 | # Insertions are harmless. | ||
2435 | 1732 | patch.length2 += len(diff_text) | ||
2436 | 1733 | start2 += len(diff_text) | ||
2437 | 1734 | patch.diffs.append(bigpatch.diffs.pop(0)) | ||
2438 | 1735 | empty = False | ||
2439 | 1736 | elif (diff_type == self.DIFF_DELETE and len(patch.diffs) == 1 and | ||
2440 | 1737 | patch.diffs[0][0] == self.DIFF_EQUAL and | ||
2441 | 1738 | len(diff_text) > 2 * patch_size): | ||
2442 | 1739 | # This is a large deletion. Let it pass in one chunk. | ||
2443 | 1740 | patch.length1 += len(diff_text) | ||
2444 | 1741 | start1 += len(diff_text) | ||
2445 | 1742 | empty = False | ||
2446 | 1743 | patch.diffs.append((diff_type, diff_text)) | ||
2447 | 1744 | del bigpatch.diffs[0] | ||
2448 | 1745 | else: | ||
2449 | 1746 | # Deletion or equality. Only take as much as we can stomach. | ||
2450 | 1747 | diff_text = diff_text[:patch_size - patch.length1 - | ||
2451 | 1748 | self.Patch_Margin] | ||
2452 | 1749 | patch.length1 += len(diff_text) | ||
2453 | 1750 | start1 += len(diff_text) | ||
2454 | 1751 | if diff_type == self.DIFF_EQUAL: | ||
2455 | 1752 | patch.length2 += len(diff_text) | ||
2456 | 1753 | start2 += len(diff_text) | ||
2457 | 1754 | else: | ||
2458 | 1755 | empty = False | ||
2459 | 1756 | |||
2460 | 1757 | patch.diffs.append((diff_type, diff_text)) | ||
2461 | 1758 | if diff_text == bigpatch.diffs[0][1]: | ||
2462 | 1759 | del bigpatch.diffs[0] | ||
2463 | 1760 | else: | ||
2464 | 1761 | bigpatch.diffs[0] = (bigpatch.diffs[0][0], | ||
2465 | 1762 | bigpatch.diffs[0][1][len(diff_text):]) | ||
2466 | 1763 | |||
2467 | 1764 | # Compute the head context for the next patch. | ||
2468 | 1765 | precontext = self.diff_text2(patch.diffs) | ||
2469 | 1766 | precontext = precontext[-self.Patch_Margin:] | ||
2470 | 1767 | # Append the end context for this patch. | ||
2471 | 1768 | postcontext = self.diff_text1(bigpatch.diffs)[:self.Patch_Margin] | ||
2472 | 1769 | if postcontext: | ||
2473 | 1770 | patch.length1 += len(postcontext) | ||
2474 | 1771 | patch.length2 += len(postcontext) | ||
2475 | 1772 | if len(patch.diffs) != 0 and patch.diffs[-1][0] == self.DIFF_EQUAL: | ||
2476 | 1773 | patch.diffs[-1] = (self.DIFF_EQUAL, patch.diffs[-1][1] + | ||
2477 | 1774 | postcontext) | ||
2478 | 1775 | else: | ||
2479 | 1776 | patch.diffs.append((self.DIFF_EQUAL, postcontext)) | ||
2480 | 1777 | |||
2481 | 1778 | if not empty: | ||
2482 | 1779 | x += 1 | ||
2483 | 1780 | patches.insert(x, patch) | ||
2484 | 1781 | |||
2485 | 1782 | def patch_toText(self, patches): | ||
2486 | 1783 | """Take a list of patches and return a textual representation. | ||
2487 | 1784 | |||
2488 | 1785 | Args: | ||
2489 | 1786 | patches: Array of Patch objects. | ||
2490 | 1787 | |||
2491 | 1788 | Returns: | ||
2492 | 1789 | Text representation of patches. | ||
2493 | 1790 | """ | ||
2494 | 1791 | text = [] | ||
2495 | 1792 | for patch in patches: | ||
2496 | 1793 | text.append(str(patch)) | ||
2497 | 1794 | return "".join(text) | ||
2498 | 1795 | |||
2499 | 1796 | def patch_fromText(self, textline): | ||
2500 | 1797 | """Parse a textual representation of patches and return a list of patch | ||
2501 | 1798 | objects. | ||
2502 | 1799 | |||
2503 | 1800 | Args: | ||
2504 | 1801 | textline: Text representation of patches. | ||
2505 | 1802 | |||
2506 | 1803 | Returns: | ||
2507 | 1804 | Array of Patch objects. | ||
2508 | 1805 | |||
2509 | 1806 | Raises: | ||
2510 | 1807 | ValueError: If invalid input. | ||
2511 | 1808 | """ | ||
2512 | 1809 | if type(textline) == unicode: | ||
2513 | 1810 | # Patches should be composed of a subset of ascii chars, Unicode not | ||
2514 | 1811 | # required. If this encode raises UnicodeEncodeError, patch is invalid. | ||
2515 | 1812 | textline = textline.encode("ascii") | ||
2516 | 1813 | patches = [] | ||
2517 | 1814 | if not textline: | ||
2518 | 1815 | return patches | ||
2519 | 1816 | text = textline.split('\n') | ||
2520 | 1817 | while len(text) != 0: | ||
2521 | 1818 | m = re.match("^@@ -(\d+),?(\d*) \+(\d+),?(\d*) @@$", text[0]) | ||
2522 | 1819 | if not m: | ||
2523 | 1820 | raise ValueError("Invalid patch string: " + text[0]) | ||
2524 | 1821 | patch = patch_obj() | ||
2525 | 1822 | patches.append(patch) | ||
2526 | 1823 | patch.start1 = int(m.group(1)) | ||
2527 | 1824 | if m.group(2) == '': | ||
2528 | 1825 | patch.start1 -= 1 | ||
2529 | 1826 | patch.length1 = 1 | ||
2530 | 1827 | elif m.group(2) == '0': | ||
2531 | 1828 | patch.length1 = 0 | ||
2532 | 1829 | else: | ||
2533 | 1830 | patch.start1 -= 1 | ||
2534 | 1831 | patch.length1 = int(m.group(2)) | ||
2535 | 1832 | |||
2536 | 1833 | patch.start2 = int(m.group(3)) | ||
2537 | 1834 | if m.group(4) == '': | ||
2538 | 1835 | patch.start2 -= 1 | ||
2539 | 1836 | patch.length2 = 1 | ||
2540 | 1837 | elif m.group(4) == '0': | ||
2541 | 1838 | patch.length2 = 0 | ||
2542 | 1839 | else: | ||
2543 | 1840 | patch.start2 -= 1 | ||
2544 | 1841 | patch.length2 = int(m.group(4)) | ||
2545 | 1842 | |||
2546 | 1843 | del text[0] | ||
2547 | 1844 | |||
2548 | 1845 | while len(text) != 0: | ||
2549 | 1846 | if text[0]: | ||
2550 | 1847 | sign = text[0][0] | ||
2551 | 1848 | else: | ||
2552 | 1849 | sign = '' | ||
2553 | 1850 | line = urllib.unquote(text[0][1:]) | ||
2554 | 1851 | line = line.decode("utf-8") | ||
2555 | 1852 | if sign == '+': | ||
2556 | 1853 | # Insertion. | ||
2557 | 1854 | patch.diffs.append((self.DIFF_INSERT, line)) | ||
2558 | 1855 | elif sign == '-': | ||
2559 | 1856 | # Deletion. | ||
2560 | 1857 | patch.diffs.append((self.DIFF_DELETE, line)) | ||
2561 | 1858 | elif sign == ' ': | ||
2562 | 1859 | # Minor equality. | ||
2563 | 1860 | patch.diffs.append((self.DIFF_EQUAL, line)) | ||
2564 | 1861 | elif sign == '@': | ||
2565 | 1862 | # Start of next patch. | ||
2566 | 1863 | break | ||
2567 | 1864 | elif sign == '': | ||
2568 | 1865 | # Blank line? Whatever. | ||
2569 | 1866 | pass | ||
2570 | 1867 | else: | ||
2571 | 1868 | # WTF? | ||
2572 | 1869 | raise ValueError("Invalid patch mode: '%s'\n%s" % (sign, line)) | ||
2573 | 1870 | del text[0] | ||
2574 | 1871 | return patches | ||
2575 | 1872 | |||
2576 | 1873 | |||
2577 | 1874 | class patch_obj: | ||
2578 | 1875 | """Class representing one patch operation. | ||
2579 | 1876 | """ | ||
2580 | 1877 | |||
2581 | 1878 | def __init__(self): | ||
2582 | 1879 | """Initializes with an empty list of diffs. | ||
2583 | 1880 | """ | ||
2584 | 1881 | self.diffs = [] | ||
2585 | 1882 | self.start1 = None | ||
2586 | 1883 | self.start2 = None | ||
2587 | 1884 | self.length1 = 0 | ||
2588 | 1885 | self.length2 = 0 | ||
2589 | 1886 | |||
2590 | 1887 | def __str__(self): | ||
2591 | 1888 | """Emulate GNU diff's format. | ||
2592 | 1889 | Header: @@ -382,8 +481,9 @@ | ||
2593 | 1890 | Indices are printed as 1-based, not 0-based. | ||
2594 | 1891 | |||
2595 | 1892 | Returns: | ||
2596 | 1893 | The GNU diff string. | ||
2597 | 1894 | """ | ||
2598 | 1895 | if self.length1 == 0: | ||
2599 | 1896 | coords1 = str(self.start1) + ",0" | ||
2600 | 1897 | elif self.length1 == 1: | ||
2601 | 1898 | coords1 = str(self.start1 + 1) | ||
2602 | 1899 | else: | ||
2603 | 1900 | coords1 = str(self.start1 + 1) + "," + str(self.length1) | ||
2604 | 1901 | if self.length2 == 0: | ||
2605 | 1902 | coords2 = str(self.start2) + ",0" | ||
2606 | 1903 | elif self.length2 == 1: | ||
2607 | 1904 | coords2 = str(self.start2 + 1) | ||
2608 | 1905 | else: | ||
2609 | 1906 | coords2 = str(self.start2 + 1) + "," + str(self.length2) | ||
2610 | 1907 | text = ["@@ -", coords1, " +", coords2, " @@\n"] | ||
2611 | 1908 | # Escape the body of the patch with %xx notation. | ||
2612 | 1909 | for (op, data) in self.diffs: | ||
2613 | 1910 | if op == diff_match_patch.DIFF_INSERT: | ||
2614 | 1911 | text.append("+") | ||
2615 | 1912 | elif op == diff_match_patch.DIFF_DELETE: | ||
2616 | 1913 | text.append("-") | ||
2617 | 1914 | elif op == diff_match_patch.DIFF_EQUAL: | ||
2618 | 1915 | text.append(" ") | ||
2619 | 1916 | # High ascii will raise UnicodeDecodeError. Use Unicode instead. | ||
2620 | 1917 | data = data.encode("utf-8") | ||
2621 | 1918 | text.append(urllib.quote(data, "!~*'();/?:@&=+$,# ") + "\n") | ||
2622 | 1919 | return "".join(text) | ||
2623 | 0 | 1920 | ||
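The module above is a copy of Google's diff-match-patch library; within this branch it is consumed via `charmtools.utils` (imported further down in this diff). As a hedged orientation aid, and illustrative only rather than text from the diff, its patch API fits together roughly as follows, assuming the Python 2 environment the module targets and placeholder texts:

```python
# Illustrative sketch of the vendored diff_match_patch patch API (Python 2).
from charmtools.compose.diff_match_patch import diff_match_patch

dmp = diff_match_patch()
old = "The quick brown fox jumps over the lazy dog."
new = "The quick red fox leaps over the lazy dog."

# patch_make "method 1": compute patches straight from two texts.
patches = dmp.patch_make(old, new)
print dmp.patch_toText(patches)      # GNU-diff-like text, body %xx-escaped

# patch_apply returns (patched_text, [True/False per patch]).
result, applied = dmp.patch_apply(patches, old)
assert result == new and all(applied)

# patch_fromText parses the textual form back into patch objects.
assert len(dmp.patch_fromText(dmp.patch_toText(patches))) == len(patches)
```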
2624 | === added file 'charmtools/compose/fetchers.py' | |||
2625 | --- charmtools/compose/fetchers.py 1970-01-01 00:00:00 +0000 | |||
2626 | +++ charmtools/compose/fetchers.py 2015-08-31 19:32:56 +0000 | |||
2627 | @@ -0,0 +1,117 @@ | |||
2628 | 1 | import re | ||
2629 | 2 | import tempfile | ||
2630 | 3 | import os | ||
2631 | 4 | |||
2632 | 5 | import requests | ||
2633 | 6 | from bundletester import fetchers | ||
2634 | 7 | from bundletester.fetchers import (git, # noqa | ||
2635 | 8 | Fetcher, | ||
2636 | 9 | get_fetcher, | ||
2637 | 10 | FetchError) | ||
2638 | 11 | |||
2639 | 12 | from path import path | ||
2640 | 13 | |||
2641 | 14 | |||
2642 | 15 | class RepoFetcher(fetchers.LocalFetcher): | ||
2643 | 16 | @classmethod | ||
2644 | 17 | def can_fetch(cls, url): | ||
2645 | 18 | search_path = [os.getcwd(), os.environ.get("JUJU_REPOSITORY", ".")] | ||
2646 | 19 | cp = os.environ.get("COMPOSER_PATH") | ||
2647 | 20 | if cp: | ||
2648 | 21 | search_path.extend(cp.split(":")) | ||
2649 | 22 | for part in search_path: | ||
2650 | 23 | p = (path(part) / url).normpath() | ||
2651 | 24 | if p.exists(): | ||
2652 | 25 | return dict(path=p) | ||
2653 | 26 | return {} | ||
2654 | 27 | |||
2655 | 28 | fetchers.FETCHERS.insert(0, RepoFetcher) | ||
2656 | 29 | |||
2657 | 30 | |||
2658 | 31 | class InterfaceFetcher(fetchers.LocalFetcher): | ||
2659 | 32 | # XXX: When hosted somewhere, fix this | ||
2660 | 33 | INTERFACE_DOMAIN = "http://interfaces.juju.solutions" | ||
2661 | 34 | NAMESPACE = "interface" | ||
2662 | 35 | ENVIRON = "INTERFACE_PATH" | ||
2663 | 36 | OPTIONAL_PREFIX = "juju-relation-" | ||
2664 | 37 | ENDPOINT = "/api/v1/interface" | ||
2665 | 38 | |||
2666 | 39 | @classmethod | ||
2667 | 40 | def can_fetch(cls, url): | ||
2668 | 41 | # Search local path first, then | ||
2669 | 42 | # the interface webservice | ||
2670 | 43 | if url.startswith("{}:".format(cls.NAMESPACE)): | ||
2671 | 44 | url = url[len(cls.NAMESPACE) + 1:] | ||
2672 | 45 | search_path = [os.environ.get("JUJU_REPOSITORY", ".")] | ||
2673 | 46 | cp = os.environ.get(cls.ENVIRON) | ||
2674 | 47 | if cp: | ||
2675 | 48 | search_path.extend(cp.split(os.pathsep)) | ||
2676 | 49 | for part in search_path: | ||
2677 | 50 | p = (path(part) / url).normpath() | ||
2678 | 51 | if p.exists(): | ||
2679 | 52 | return dict(path=p) | ||
2680 | 53 | |||
2681 | 54 | choices = [url] | ||
2682 | 55 | if url.startswith(cls.OPTIONAL_PREFIX): | ||
2683 | 56 | choices.append(url[len(cls.OPTIONAL_PREFIX):]) | ||
2684 | 57 | for choice in choices: | ||
2685 | 58 | uri = "%s%s/%s/" % ( | ||
2686 | 59 | cls.INTERFACE_DOMAIN, cls.ENDPOINT, choice) | ||
2687 | 60 | try: | ||
2688 | 61 | result = requests.get(uri) | ||
2689 | 62 | except: | ||
2690 | 63 | result = None | ||
2691 | 64 | if result and result.ok: | ||
2692 | 65 | result = result.json() | ||
2693 | 66 | if "repo" in result: | ||
2694 | 67 | return result | ||
2695 | 68 | return {} | ||
2696 | 69 | |||
2697 | 70 | def fetch(self, dir_): | ||
2698 | 71 | if hasattr(self, "path"): | ||
2699 | 72 | return super(InterfaceFetcher, self).fetch(dir_) | ||
2700 | 73 | elif hasattr(self, "repo"): | ||
2701 | 74 | # use the github fetcher for now | ||
2702 | 75 | u = self.url[10:] | ||
2703 | 76 | f = get_fetcher(self.repo) | ||
2704 | 77 | if hasattr(f, "repo"): | ||
2705 | 78 | basename = path(f.repo).name.splitext()[0] | ||
2706 | 79 | else: | ||
2707 | 80 | basename = u | ||
2708 | 81 | res = f.fetch(dir_) | ||
2709 | 82 | target = dir_ / basename | ||
2710 | 83 | if res != target: | ||
2711 | 84 | target.rmtree_p() | ||
2712 | 85 | path(res).rename(target) | ||
2713 | 86 | return target | ||
2714 | 87 | |||
2715 | 88 | |||
2716 | 89 | fetchers.FETCHERS.insert(0, InterfaceFetcher) | ||
2717 | 90 | |||
2718 | 91 | |||
2719 | 92 | class LayerFetcher(InterfaceFetcher): | ||
2720 | 93 | INTERFACE_DOMAIN = "http://interfaces.juju.solutions" | ||
2721 | 94 | NAMESPACE = "layer" | ||
2722 | 95 | ENVIRON = "COMPOSER_PATH" | ||
2723 | 96 | OPTIONAL_PREFIX = "juju-layer-" | ||
2724 | 97 | ENDPOINT = "/api/v1/layer" | ||
2725 | 98 | |||
2726 | 99 | fetchers.FETCHERS.insert(0, LayerFetcher) | ||
2727 | 100 | |||
2728 | 101 | |||
2729 | 102 | class LaunchpadGitFetcher(Fetcher): | ||
2730 | 103 | # XXX: this should be upstreamed | ||
2731 | 104 | MATCH = re.compile(r""" | ||
2732 | 105 | ^(git:|https)?://git.launchpad.net/ | ||
2733 | 106 | (?P<repo>[^@]*)(@(?P<revision>.*))?$ | ||
2734 | 107 | """, re.VERBOSE) | ||
2735 | 108 | |||
2736 | 109 | def fetch(self, dir_): | ||
2737 | 110 | dir_ = tempfile.mkdtemp(dir=dir_) | ||
2738 | 111 | url = 'https://git.launchpad.net/' + self.repo | ||
2739 | 112 | git('clone {} {}'.format(url, dir_)) | ||
2740 | 113 | if self.revision: | ||
2741 | 114 | git('checkout {}'.format(self.revision), cwd=dir_) | ||
2742 | 115 | return dir_ | ||
2743 | 116 | |||
2744 | 117 | fetchers.FETCHERS.append(LaunchpadGitFetcher) | ||
2745 | 0 | 118 | ||
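The fetchers above all follow the same dispatch convention: `can_fetch(url)` is a classmethod returning a dict of hints when the class recognises a URL (an empty dict declines), and position in `fetchers.FETCHERS` decides priority, which is why each new fetcher is inserted at the front. A hedged sketch of how that resolution might be exercised; the URLs are placeholders, and the interface/layer lookups can reach interfaces.juju.solutions:

```python
# Illustrative only: ask the registered fetchers which one claims a URL.
from bundletester import fetchers
import charmtools.compose.fetchers  # noqa -- importing registers the fetchers


def resolve(url):
    """Return (fetcher_class, hints) for the first fetcher claiming url."""
    for cls in fetchers.FETCHERS:
        hints = cls.can_fetch(url)
        if hints:
            return cls, hints
    return None, {}

print resolve("trusty/mysql")   # RepoFetcher, when the path exists locally
print resolve("layer:basic")    # LayerFetcher; may consult the web service
```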
2746 | === added file 'charmtools/compose/inspector.py' | |||
2747 | --- charmtools/compose/inspector.py 1970-01-01 00:00:00 +0000 | |||
2748 | +++ charmtools/compose/inspector.py 2015-08-31 19:32:56 +0000 | |||
2749 | @@ -0,0 +1,101 @@ | |||
2750 | 1 | # coding=utf-8 | ||
2751 | 2 | import json | ||
2752 | 3 | from ruamel import yaml | ||
2753 | 4 | from charmtools.compose import config | ||
2754 | 5 | from charmtools import utils | ||
2755 | 6 | |||
2756 | 7 | theme = { | ||
2757 | 8 | 0: "normal", | ||
2758 | 9 | 1: "green", | ||
2759 | 10 | 2: "cyan", | ||
2760 | 11 | 3: "magenta", | ||
2761 | 12 | 4: "yellow", | ||
2762 | 13 | 5: "red", | ||
2763 | 14 | } | ||
2764 | 15 | |||
2765 | 16 | |||
2766 | 17 | def scan_for(col, cur, depth): | ||
2767 | 18 | for e, (rel, d) in col[cur:]: | ||
2768 | 19 | if d and d == depth: | ||
2769 | 20 | return True | ||
2770 | 21 | return False | ||
2771 | 22 | |||
2772 | 23 | |||
2773 | 24 | def get_prefix(walk, cur, depth, next_depth): | ||
2774 | 25 | guide = [] | ||
2775 | 26 | for i in range(depth): | ||
2776 | 27 | # scan forward in walk from cur, checking whether a | ||
2777 | 28 | # subsequent entry appears at depth i | ||
2778 | 29 | if scan_for(walk, cur, i): | ||
2779 | 30 | guide.append(" │ ") | ||
2780 | 31 | else: | ||
2781 | 32 | guide.append(" ") | ||
2782 | 33 | if depth == next_depth: | ||
2783 | 34 | prefix = " ├─── " | ||
2784 | 35 | else: | ||
2785 | 36 | prefix = " └─── " | ||
2786 | 37 | return "{}{}".format("".join(guide), prefix) | ||
2787 | 38 | |||
2788 | 39 | |||
2789 | 40 | def inspect(charm): | ||
2790 | 41 | tw = utils.TermWriter() | ||
2791 | 42 | manp = charm / ".composer.manifest" | ||
2792 | 43 | comp = charm / "composer.yaml" | ||
2793 | 44 | if not manp.exists() or not comp.exists(): | ||
2794 | 45 | return | ||
2795 | 46 | manifest = json.loads(manp.text()) | ||
2796 | 47 | composer = yaml.load(comp.open()) | ||
2797 | 48 | a, c, d = utils.delta_signatures(manp) | ||
2798 | 49 | |||
2799 | 50 | # ordered list of layers used for legend | ||
2800 | 51 | layers = list(manifest['layers']) | ||
2801 | 52 | |||
2802 | 53 | def get_depth(e): | ||
2803 | 54 | rel = e.relpath(charm) | ||
2804 | 55 | depth = len(rel.splitall()) - 2 | ||
2805 | 56 | return rel, depth | ||
2806 | 57 | |||
2807 | 58 | def get_suffix(rel): | ||
2808 | 59 | suffix = "" | ||
2809 | 60 | if rel in a: | ||
2810 | 61 | suffix = "+" | ||
2811 | 62 | elif rel in c: | ||
2812 | 63 | suffix = "*" | ||
2813 | 64 | return suffix | ||
2814 | 65 | |||
2815 | 66 | def get_color(rel): | ||
2816 | 67 | # name of layer this belongs to | ||
2817 | 68 | color = tw.term.normal | ||
2818 | 69 | if rel in manifest['signatures']: | ||
2819 | 70 | layer = manifest['signatures'][rel][0] | ||
2820 | 71 | layer_key = layers.index(layer) | ||
2821 | 72 | color = getattr(tw, theme.get(layer_key, "normal")) | ||
2822 | 73 | else: | ||
2823 | 74 | if entry.isdir(): | ||
2824 | 75 | color = tw.blue | ||
2825 | 76 | return color | ||
2826 | 77 | |||
2827 | 78 | tw.write("Inspect %s\n" % composer["is"]) | ||
2828 | 79 | for layer in layers: | ||
2829 | 80 | tw.write("# {color}{layer}{t.normal}\n", | ||
2830 | 81 | color=getattr(tw, theme.get( | ||
2831 | 82 | layers.index(layer), "normal")), | ||
2832 | 83 | layer=layer) | ||
2833 | 84 | tw.write("\n") | ||
2834 | 85 | tw.write("{t.blue}{target}{t.normal}\n", target=charm) | ||
2835 | 86 | |||
2836 | 87 | ignorer = utils.ignore_matcher(config.DEFAULT_IGNORES) | ||
2837 | 88 | walk = sorted(utils.walk(charm, get_depth), | ||
2838 | 89 | key=lambda x: x[1][0]) | ||
2839 | 90 | for i in range(len(walk) - 1): | ||
2840 | 91 | entry, (rel, depth) = walk[i] | ||
2841 | 92 | nEnt, (nrel, ndepth) = walk[i + 1] | ||
2842 | 93 | if not ignorer(rel): | ||
2843 | 94 | continue | ||
2844 | 95 | |||
2845 | 96 | tw.write("{prefix}{layerColor}{entry} " | ||
2846 | 97 | "{t.bold}{suffix}{t.normal}\n", | ||
2847 | 98 | prefix=get_prefix(walk, i, depth, ndepth), | ||
2848 | 99 | layerColor=get_color(rel), | ||
2849 | 100 | suffix=get_suffix(rel), | ||
2850 | 101 | entry=rel.name) | ||
2851 | 0 | 102 | ||
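A hedged usage note on the inspector above: it only renders output for a charm the composer has already generated, since it requires both `.composer.manifest` and `composer.yaml` in the target directory; judging from `get_suffix`, a trailing `+` marks an entry added since the recorded signatures and `*` marks one that has changed. The target path below is a placeholder:

```python
# Illustrative only: show which layer contributed each file of a built charm.
from path import path
from charmtools.compose.inspector import inspect

inspect(path("/tmp/out/trusty/mycharm"))  # prints a colour-coded layer legend
                                          # followed by the file tree
```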
2852 | === added file 'charmtools/compose/tactics.py' | |||
2853 | --- charmtools/compose/tactics.py 1970-01-01 00:00:00 +0000 | |||
2854 | +++ charmtools/compose/tactics.py 2015-08-31 19:32:56 +0000 | |||
2855 | @@ -0,0 +1,449 @@ | |||
2856 | 1 | import logging | ||
2857 | 2 | import json | ||
2858 | 3 | from ruamel import yaml | ||
2859 | 4 | |||
2860 | 5 | from path import path | ||
2861 | 6 | from charmtools import utils | ||
2862 | 7 | |||
2863 | 8 | log = logging.getLogger(__name__) | ||
2864 | 9 | |||
2865 | 10 | |||
2866 | 11 | class Tactic(object): | ||
2867 | 12 | """ | ||
2868 | 13 | Tactics are first considered in the context of the config layer being | ||
2869 | 14 | called; that config layer will attempt (using its author-provided info) | ||
2870 | 15 | to create a tactic for a given file. That will later be intersected with | ||
2871 | 16 | any later layers to create a final single plan for each element of the | ||
2872 | 17 | output charm. | ||
2873 | 18 | |||
2874 | 19 | Callable that will implement some portion of the charm composition. | ||
2875 | 20 | Subclasses should implement __str__ and __call__, which should take | ||
2876 | 21 | whatever actions are needed. | ||
2877 | 22 | """ | ||
2878 | 23 | kind = "static" # used in signatures | ||
2879 | 24 | |||
2880 | 25 | def __init__(self, entity, current, target, config): | ||
2881 | 26 | self.entity = entity | ||
2882 | 27 | self._current = current | ||
2883 | 28 | self._target = target | ||
2884 | 29 | self._raw_data = None | ||
2885 | 30 | self._config = config | ||
2886 | 31 | |||
2887 | 32 | def __call__(self): | ||
2888 | 33 | raise NotImplementedError | ||
2889 | 34 | |||
2890 | 35 | def __str__(self): | ||
2891 | 36 | return "{}: {} -> {}".format( | ||
2892 | 37 | self.__class__.__name__, self.entity, self.target_file) | ||
2893 | 38 | |||
2894 | 39 | @property | ||
2895 | 40 | def current(self): | ||
2896 | 41 | """The file in the current layer under consideration""" | ||
2897 | 42 | return self._current | ||
2898 | 43 | |||
2899 | 44 | @property | ||
2900 | 45 | def target(self): | ||
2901 | 46 | """The target (final) layer.""" | ||
2902 | 47 | return self._target | ||
2903 | 48 | |||
2904 | 49 | @property | ||
2905 | 50 | def relpath(self): | ||
2906 | 51 | return self.entity.relpath(self.current.directory) | ||
2907 | 52 | |||
2908 | 53 | @property | ||
2909 | 54 | def target_file(self): | ||
2910 | 55 | target = self.target.directory / self.relpath | ||
2911 | 56 | return target | ||
2912 | 57 | |||
2913 | 58 | @property | ||
2914 | 59 | def layer_name(self): | ||
2915 | 60 | return self.current.directory.name | ||
2916 | 61 | |||
2917 | 62 | @property | ||
2918 | 63 | def repo_path(self): | ||
2919 | 64 | return path("/".join(self.current.directory.splitall()[-2:])) | ||
2920 | 65 | |||
2921 | 66 | @property | ||
2922 | 67 | def config(self): | ||
2923 | 68 | # Return the config of the layer *above* you | ||
2924 | 69 | # as that is the one that controls your compositing | ||
2925 | 70 | return self._config | ||
2926 | 71 | |||
2927 | 72 | def combine(self, existing): | ||
2928 | 73 | """Produce a tactic informed by the last tactic for an entry. | ||
2929 | 74 | This is when a rule in a higher level charm overrode something in | ||
2930 | 75 | one of its bases for example.""" | ||
2931 | 76 | return self | ||
2932 | 77 | |||
2933 | 78 | @classmethod | ||
2934 | 79 | def trigger(cls, relpath): | ||
2935 | 80 | """Should the rule trigger for a given path object""" | ||
2936 | 81 | return False | ||
2937 | 82 | |||
2938 | 83 | def sign(self): | ||
2939 | 84 | """return sign in the form {relpath: (origin layer, SHA256)} | ||
2940 | 85 | """ | ||
2941 | 86 | target = self.target_file | ||
2942 | 87 | sig = {} | ||
2943 | 88 | if target.exists() and target.isfile(): | ||
2944 | 89 | sig[self.relpath] = (self.current.url, | ||
2945 | 90 | self.kind, | ||
2946 | 91 | utils.sign(self.target_file)) | ||
2947 | 92 | return sig | ||
2948 | 93 | |||
2949 | 94 | def lint(self): | ||
2950 | 95 | return True | ||
2951 | 96 | |||
2952 | 97 | def read(self): | ||
2953 | 98 | return None | ||
2954 | 99 | |||
2955 | 100 | def build(self): | ||
2956 | 101 | pass | ||
2957 | 102 | |||
2958 | 103 | |||
2959 | 104 | class ExactMatch(object): | ||
2960 | 105 | FILENAME = None | ||
2961 | 106 | |||
2962 | 107 | @classmethod | ||
2963 | 108 | def trigger(cls, relpath): | ||
2964 | 109 | return cls.FILENAME == relpath | ||
2965 | 110 | |||
2966 | 111 | |||
2967 | 112 | class CopyTactic(Tactic): | ||
2968 | 113 | def __call__(self): | ||
2969 | 114 | if self.entity.isdir(): | ||
2970 | 115 | return | ||
2971 | 116 | should_ignore = utils.ignore_matcher(self.target.config.ignores) | ||
2972 | 117 | if not should_ignore(self.relpath): | ||
2973 | 118 | return | ||
2974 | 119 | target = self.target_file | ||
2975 | 120 | log.debug("Copying %s: %s", self.layer_name, target) | ||
2976 | 121 | # Ensure the path exists | ||
2977 | 122 | target.dirname().makedirs_p() | ||
2978 | 123 | if (self.entity != target) and not target.exists() \ | ||
2979 | 124 | or not self.entity.samefile(target): | ||
2980 | 125 | data = self.read() | ||
2981 | 126 | if data: | ||
2982 | 127 | target.write_bytes(data) | ||
2983 | 128 | self.entity.copymode(target) | ||
2984 | 129 | else: | ||
2985 | 130 | self.entity.copy2(target) | ||
2986 | 131 | |||
2987 | 132 | def __str__(self): | ||
2988 | 133 | return "Copy {}".format(self.entity) | ||
2989 | 134 | |||
2990 | 135 | @classmethod | ||
2991 | 136 | def trigger(cls, relpath): | ||
2992 | 137 | return True | ||
2993 | 138 | |||
2994 | 139 | |||
2995 | 140 | class InterfaceCopy(Tactic): | ||
2996 | 141 | def __init__(self, interface, relation_name, target, config): | ||
2997 | 142 | self.interface = interface | ||
2998 | 143 | self.relation_name = relation_name | ||
2999 | 144 | self._target = target | ||
3000 | 145 | self._config = config | ||
3001 | 146 | |||
3002 | 147 | @property | ||
3003 | 148 | def target(self): | ||
3004 | 149 | return self._target / "hooks/relations" / self.interface.name | ||
3005 | 150 | |||
3006 | 151 | def __call__(self): | ||
3007 | 152 | # copy the entire tree into the | ||
3008 | 153 | # hooks/relations/<interface> | ||
3009 | 154 | # directory | ||
3010 | 155 | log.debug("Copying Interface %s: %s", | ||
3011 | 156 | self.interface.name, self.target) | ||
3012 | 157 | # Ensure the path exists | ||
3013 | 158 | if self.target.exists(): | ||
3014 | 159 | # XXX: fix this to do actual updates | ||
3015 | 160 | return | ||
3016 | 161 | ignorer = utils.ignore_matcher(self.config.ignores) | ||
3017 | 162 | for entity, _ in utils.walk(self.interface.directory, | ||
3018 | 163 | lambda x: True, | ||
3019 | 164 | matcher=ignorer, | ||
3020 | 165 | kind="files"): | ||
3021 | 166 | target = entity.relpath(self.interface.directory) | ||
3022 | 167 | target = (self.target / target).normpath() | ||
3023 | 168 | target.parent.makedirs_p() | ||
3024 | 169 | entity.copy2(target) | ||
3025 | 170 | init = self.target / "__init__.py" | ||
3026 | 171 | if not init.exists(): | ||
3027 | 172 | # ensure we can import from here directly | ||
3028 | 173 | init.touch() | ||
3029 | 174 | |||
3030 | 175 | def __str__(self): | ||
3031 | 176 | return "Copy Interface {}".format(self.interface.name) | ||
3032 | 177 | |||
3033 | 178 | def sign(self): | ||
3034 | 179 | """return sign in the form {relpath: (origin layer, SHA256)} | ||
3035 | 180 | """ | ||
3036 | 181 | sigs = {} | ||
3037 | 182 | for entry, sig in utils.walk(self.target, | ||
3038 | 183 | utils.sign, kind="files"): | ||
3039 | 184 | relpath = entry.relpath(self._target.directory) | ||
3040 | 185 | sigs[relpath] = (self.interface.url, "static", sig) | ||
3041 | 186 | return sigs | ||
3042 | 187 | |||
3043 | 188 | def lint(self): | ||
3044 | 189 | for entry in self.interface.directory.walkfiles(): | ||
3045 | 190 | if entry.splitext()[1] != ".py": | ||
3046 | 191 | continue | ||
3047 | 192 | relpath = entry.relpath(self._target.directory) | ||
3048 | 193 | target = self._target.directory / relpath | ||
3049 | 194 | if not target.exists(): | ||
3050 | 195 | continue | ||
3051 | 196 | return utils.delta_python_dump(entry, target, | ||
3052 | 197 | from_name=relpath) | ||
3053 | 198 | |||
3054 | 199 | |||
3055 | 200 | class InterfaceBind(InterfaceCopy): | ||
3056 | 201 | def __init__(self, interface, relation_name, kind, target, config): | ||
3057 | 202 | self.interface = interface | ||
3058 | 203 | self.relation_name = relation_name | ||
3059 | 204 | self.kind = kind | ||
3060 | 205 | self._target = target | ||
3061 | 206 | self._config = config | ||
3062 | 207 | |||
3063 | 208 | DEFAULT_BINDING = """#!/usr/bin/env python | ||
3064 | 209 | |||
3065 | 210 | # Load modules from $CHARM_DIR/lib | ||
3066 | 211 | import sys | ||
3067 | 212 | sys.path.append('lib') | ||
3068 | 213 | |||
3069 | 214 | # This will load and run the appropriate @hook and other decorated | ||
3070 | 215 | # handlers from $CHARM_DIR/reactive, $CHARM_DIR/hooks/reactive, | ||
3071 | 216 | # and $CHARM_DIR/hooks/relations. | ||
3072 | 217 | # | ||
3073 | 218 | # See https://jujucharms.com/docs/stable/getting-started-with-charms-reactive | ||
3074 | 219 | # for more information on this pattern. | ||
3075 | 220 | from charms.reactive import main | ||
3076 | 221 | main('{}') | ||
3077 | 222 | """ | ||
3078 | 223 | |||
3079 | 224 | def __call__(self): | ||
3080 | 225 | for hook in ['joined', 'changed', 'broken', 'departed']: | ||
3081 | 226 | target = self._target / "hooks" / "{}-relation-{}".format( | ||
3082 | 227 | self.relation_name, hook) | ||
3083 | 228 | if target.exists(): | ||
3084 | 229 | # XXX: warn | ||
3085 | 230 | continue | ||
3086 | 231 | target.parent.makedirs_p() | ||
3087 | 232 | target.write_text(self.DEFAULT_BINDING.format(self.relation_name)) | ||
3088 | 233 | target.chmod(0755) | ||
3089 | 234 | |||
3090 | 235 | def sign(self): | ||
3091 | 236 | """return sign in the form {relpath: (origin layer, SHA256)} | ||
3092 | 237 | """ | ||
3093 | 238 | sigs = {} | ||
3094 | 239 | for hook in ['joined', 'changed', 'broken', 'departed']: | ||
3095 | 240 | target = self._target / "hooks" / "{}-relation-{}".format( | ||
3096 | 241 | self.relation_name, hook) | ||
3097 | 242 | rel = target.relpath(self._target.directory) | ||
3098 | 243 | sigs[rel] = (self.interface.url, | ||
3099 | 244 | "dynamic", | ||
3100 | 245 | utils.sign(target)) | ||
3101 | 246 | return sigs | ||
3102 | 247 | |||
3103 | 248 | def __str__(self): | ||
3104 | 249 | return "Bind Interface {}".format(self.interface.name) | ||
3105 | 250 | |||
3106 | 251 | |||
3107 | 252 | class ManifestTactic(ExactMatch, Tactic): | ||
3108 | 253 | FILENAME = ".composer.manifest" | ||
3109 | 254 | |||
3110 | 255 | def __call__(self): | ||
3111 | 256 | # Don't copy manifests, they are regenerated | ||
3112 | 257 | pass | ||
3113 | 258 | |||
3114 | 259 | |||
3115 | 260 | class SerializedTactic(ExactMatch, Tactic): | ||
3116 | 261 | kind = "dynamic" | ||
3117 | 262 | |||
3118 | 263 | def __init__(self, *args, **kwargs): | ||
3119 | 264 | super(SerializedTactic, self).__init__(*args, **kwargs) | ||
3120 | 265 | self.data = None | ||
3121 | 266 | |||
3122 | 267 | def combine(self, existing): | ||
3123 | 268 | # Invoke the previous tactic | ||
3124 | 269 | existing() | ||
3125 | 270 | if existing.data is not None: | ||
3126 | 271 | self.data = existing.data | ||
3127 | 272 | return self | ||
3128 | 273 | |||
3129 | 274 | def __call__(self): | ||
3130 | 275 | data = self.load(self.entity.open()) | ||
3131 | 276 | # self.data represents the product of previous layers | ||
3132 | 277 | if self.data: | ||
3133 | 278 | data = utils.deepmerge(self.data, data) | ||
3134 | 279 | |||
3135 | 280 | # Now apply any rules from config | ||
3136 | 281 | config = self.config | ||
3137 | 282 | if config: | ||
3138 | 283 | section = config.get(self.section) | ||
3139 | 284 | if section: | ||
3140 | 285 | dels = section.get('deletes', []) | ||
3141 | 286 | if self.prefix: | ||
3142 | 287 | namespace = data[self.prefix] | ||
3143 | 288 | else: | ||
3144 | 289 | namespace = data | ||
3145 | 290 | for key in dels: | ||
3146 | 291 | utils.delete_path(key, namespace) | ||
3147 | 292 | self.data = data | ||
3148 | 293 | if not self.target_file.parent.exists(): | ||
3149 | 294 | self.target_file.parent.makedirs_p() | ||
3150 | 295 | self.dump(data) | ||
3151 | 296 | return data | ||
3152 | 297 | |||
3153 | 298 | |||
3154 | 299 | class YAMLTactic(SerializedTactic): | ||
3155 | 300 | """Rule Driven YAML generation""" | ||
3156 | 301 | prefix = None | ||
3157 | 302 | |||
3158 | 303 | def load(self, fn): | ||
3159 | 304 | return yaml.load(fn, Loader=yaml.RoundTripLoader) | ||
3160 | 305 | |||
3161 | 306 | def dump(self, data): | ||
3162 | 307 | yaml.dump(data, self.target_file.open('w'), | ||
3163 | 308 | Dumper=yaml.RoundTripDumper, | ||
3164 | 309 | default_flow_style=False) | ||
3165 | 310 | |||
3166 | 311 | |||
3167 | 312 | class JSONTactic(SerializedTactic): | ||
3168 | 313 | """Rule Driven JSON generation""" | ||
3169 | 314 | prefix = None | ||
3170 | 315 | |||
3171 | 316 | def load(self, fn): | ||
3172 | 317 | return json.load(fn) | ||
3173 | 318 | |||
3174 | 319 | def dump(self, data): | ||
3175 | 320 | json.dump(data, self.target_file.open('w'), indent=2) | ||
3176 | 321 | |||
3177 | 322 | |||
3178 | 323 | class ComposerYAML(YAMLTactic, ExactMatch): | ||
3179 | 324 | FILENAME = "composer.yaml" | ||
3180 | 325 | |||
3181 | 326 | def read(self): | ||
3182 | 327 | self._raw_data = self.load(self.entity.open()) | ||
3183 | 328 | |||
3184 | 329 | def __call__(self): | ||
3185 | 330 | # rewrite includes to be the current source | ||
3186 | 331 | data = self._raw_data | ||
3187 | 332 | if data is None: | ||
3188 | 333 | return | ||
3189 | 334 | # The split should result in the series/charm path only | ||
3190 | 335 | # XXX: there will be strange interactions with cs: vs local: | ||
3191 | 336 | if 'is' not in data: | ||
3192 | 337 | data['is'] = str(self.current.url) | ||
3193 | 338 | inc = data.get('includes', []) | ||
3194 | 339 | norm = [] | ||
3195 | 340 | for i in inc: | ||
3196 | 341 | if ":" in i: | ||
3197 | 342 | norm.append(i) | ||
3198 | 343 | else: | ||
3199 | 344 | # Attempt to normalize to a repository base | ||
3200 | 345 | norm.append("/".join(path(i).splitall()[-2:])) | ||
3201 | 346 | if norm: | ||
3202 | 347 | data['includes'] = norm | ||
3203 | 348 | if not self.target_file.parent.exists(): | ||
3204 | 349 | self.target_file.parent.makedirs_p() | ||
3205 | 350 | self.dump(data) | ||
3206 | 351 | return data | ||
3207 | 352 | |||
3208 | 353 | |||
3209 | 354 | class MetadataYAML(YAMLTactic): | ||
3210 | 355 | """Rule Driven metadata.yaml generation""" | ||
3211 | 356 | section = "metadata" | ||
3212 | 357 | FILENAME = "metadata.yaml" | ||
3213 | 358 | KEY_ORDER = ["name", "summary", "maintainer", | ||
3214 | 359 | "description", "tags", | ||
3215 | 360 | "requires", "provides", "peers"] | ||
3216 | 361 | |||
3217 | 362 | def dump(self, data): | ||
3218 | 363 | if not data: | ||
3219 | 364 | return | ||
3220 | 365 | final = yaml.comments.CommentedMap() | ||
3221 | 366 | # assemble keys in known order | ||
3222 | 367 | for k in self.KEY_ORDER: | ||
3223 | 368 | if k in data: | ||
3224 | 369 | final[k] = data[k] | ||
3225 | 370 | missing = set(data.keys()) - set(self.KEY_ORDER) | ||
3226 | 371 | for k in sorted(missing): | ||
3227 | 372 | final[k] = data[k] | ||
3228 | 373 | super(MetadataYAML, self).dump(final) | ||
3229 | 374 | |||
3230 | 375 | |||
3231 | 376 | class ConfigYAML(MetadataYAML): | ||
3232 | 377 | """Rule driven config.yaml generation""" | ||
3233 | 378 | section = "config" | ||
3234 | 379 | prefix = "options" | ||
3235 | 380 | FILENAME = "config.yaml" | ||
3236 | 381 | |||
3237 | 382 | |||
3238 | 383 | class InstallerTactic(Tactic): | ||
3239 | 384 | def __str__(self): | ||
3240 | 385 | return "Installing software to {}".format(self.relpath) | ||
3241 | 386 | |||
3242 | 387 | @classmethod | ||
3243 | 388 | def trigger(cls, relpath): | ||
3244 | 389 | ext = relpath.splitext()[1] | ||
3245 | 390 | return ext in [".pypi", ] | ||
3246 | 391 | |||
3247 | 392 | def __call__(self): | ||
3248 | 393 | # install package reference in trigger file | ||
3249 | 394 | # in place directory of target | ||
3250 | 395 | # XXX: Should this map multiline to "-r", self.entity | ||
3251 | 396 | spec = self.entity.text().strip() | ||
3252 | 397 | target = self.target_file.dirname() | ||
3253 | 398 | target_dir = target / path(spec.split(" ", 1)[0]).normpath().namebase | ||
3254 | 399 | log.debug("pip installing {} as {}".format( | ||
3255 | 400 | spec, target_dir)) | ||
3256 | 401 | with utils.tempdir() as temp_dir: | ||
3257 | 402 | # We do this dance so we don't have | ||
3258 | 403 | # to guess package and .egg file names: | ||
3259 | 404 | # we move everything in the tempdir to the target | ||
3260 | 405 | # and track it for later use in sign(). | ||
3261 | 406 | utils.Process(("pip", | ||
3262 | 407 | "install", | ||
3263 | 408 | "-U", | ||
3264 | 409 | "--exists-action", | ||
3265 | 410 | "i", | ||
3266 | 411 | "-t", | ||
3267 | 412 | temp_dir, | ||
3268 | 413 | spec)).throw_on_error()() | ||
3269 | 414 | dirs = temp_dir.listdir() | ||
3270 | 415 | self._tracked = [] | ||
3271 | 416 | for d in dirs: | ||
3272 | 417 | d.move(target) | ||
3273 | 418 | self._tracked.append(target / d) | ||
3274 | 419 | |||
3275 | 420 | def sign(self): | ||
3276 | 421 | """return sign in the form {relpath: (origin layer, SHA256)} | ||
3277 | 422 | """ | ||
3278 | 423 | sigs = {} | ||
3279 | 424 | for d in self._tracked: | ||
3280 | 425 | for entry, sig in utils.walk(d, | ||
3281 | 426 | utils.sign, kind="files"): | ||
3282 | 427 | relpath = entry.relpath(self._target.directory) | ||
3283 | 428 | sigs[relpath] = (self.current.url, "dynamic", sig) | ||
3284 | 429 | return sigs | ||
3285 | 430 | |||
3286 | 431 | |||
3287 | 432 | def load_tactic(dpath, basedir): | ||
3288 | 433 | """Load a tactic from the current layer using a dotted path. The last | ||
3289 | 434 | element in the path should be a Tactic subclass | ||
3290 | 435 | """ | ||
3291 | 436 | obj = utils.load_class(dpath, basedir) | ||
3292 | 437 | if not issubclass(obj, Tactic): | ||
3293 | 438 | raise ValueError("Expected to load a tactic for %s" % dpath) | ||
3294 | 439 | return obj | ||
3295 | 440 | |||
3296 | 441 | |||
3297 | 442 | DEFAULT_TACTICS = [ | ||
3298 | 443 | ManifestTactic, | ||
3299 | 444 | InstallerTactic, | ||
3300 | 445 | MetadataYAML, | ||
3301 | 446 | ConfigYAML, | ||
3302 | 447 | ComposerYAML, | ||
3303 | 448 | CopyTactic | ||
3304 | 449 | ] | ||
3305 | 0 | 450 | ||
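The tactic classes above are also the extension point for layers: `load_tactic()` resolves a dotted path relative to a layer directory and checks that it names a `Tactic` subclass. As a hedged sketch (the class name and the README handling are invented for illustration), a layer shipping a `tactics.py` at its root could declare something like the following and reference it as `tactics.ReadmeTactic`:

```python
# Illustrative only: a custom tactic a layer might ship for the composer.
from charmtools.compose.tactics import Tactic


class ReadmeTactic(Tactic):
    """Keep whatever README.md an earlier layer already produced."""

    @classmethod
    def trigger(cls, relpath):
        # Claim only README.md; every other path falls through to the
        # DEFAULT_TACTICS (ultimately CopyTactic).
        return relpath == "README.md"

    def __call__(self):
        # Deliberately a no-op: skipping the copy leaves the base layer's
        # README.md in the target untouched.
        pass
```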
3306 | === added file 'charmtools/utils.py' | |||
3307 | --- charmtools/utils.py 1970-01-01 00:00:00 +0000 | |||
3308 | +++ charmtools/utils.py 2015-08-31 19:32:56 +0000 | |||
3309 | @@ -0,0 +1,518 @@ | |||
3310 | 1 | import copy | ||
3311 | 2 | import collections | ||
3312 | 3 | import hashlib | ||
3313 | 4 | import json | ||
3314 | 5 | import logging | ||
3315 | 6 | import os | ||
3316 | 7 | import re | ||
3317 | 8 | import subprocess | ||
3318 | 9 | import sys | ||
3319 | 10 | import tempfile | ||
3320 | 11 | import time | ||
3321 | 12 | from contextlib import contextmanager | ||
3322 | 13 | |||
3323 | 14 | from .compose.diff_match_patch import diff_match_patch | ||
3324 | 15 | import blessings | ||
3325 | 16 | import pathspec | ||
3326 | 17 | from path import path | ||
3327 | 18 | |||
3328 | 19 | log = logging.getLogger('utils') | ||
3329 | 20 | |||
3330 | 21 | |||
3331 | 22 | @contextmanager | ||
3332 | 23 | def cd(directory, make=False): | ||
3333 | 24 | cwd = os.getcwd() | ||
3334 | 25 | if not os.path.exists(directory) and make: | ||
3335 | 26 | os.makedirs(directory) | ||
3336 | 27 | os.chdir(directory) | ||
3337 | 28 | try: | ||
3338 | 29 | yield | ||
3339 | 30 | finally: | ||
3340 | 31 | os.chdir(cwd) | ||
3341 | 32 | |||
3342 | 33 | |||
3343 | 34 | @contextmanager | ||
3344 | 35 | def tempdir(): | ||
3345 | 36 | dirname = path(tempfile.mkdtemp()) | ||
3346 | 37 | with cd(dirname): | ||
3347 | 38 | yield dirname | ||
3348 | 39 | dirname.rmtree_p() | ||
3349 | 40 | |||
3350 | 41 | |||
3351 | 42 | def deepmerge(dest, src): | ||
3352 | 43 | """ | ||
3353 | 44 | Deep merge of two dicts. | ||
3354 | 45 | |||
3355 | 46 | This is destructive (`dest` is modified), but values | ||
3356 | 47 | from `src` are passed through `copy.deepcopy`. | ||
3357 | 48 | """ | ||
3358 | 49 | for k, v in src.iteritems(): | ||
3359 | 50 | if dest.get(k) and isinstance(v, dict): | ||
3360 | 51 | deepmerge(dest[k], v) | ||
3361 | 52 | else: | ||
3362 | 53 | dest[k] = copy.deepcopy(v) | ||
3363 | 54 | return dest | ||
3364 | 55 | |||
3365 | 56 | |||
3366 | 57 | def delete_path(path, obj): | ||
3367 | 58 | """Delete a dotted path from object, assuming each level is a dict""" | ||
3368 | 59 | parts = path.split('.') | ||
3369 | 60 | for p in parts[:-1]: | ||
3370 | 61 | obj = obj[p] | ||
3371 | 62 | del obj[parts[-1]] | ||
3372 | 63 | |||
3373 | 64 | |||
3374 | 65 | class NestedDict(dict): | ||
3375 | 66 | def __init__(self, dict_or_iterable=None, **kwargs): | ||
3376 | 67 | if dict_or_iterable: | ||
3377 | 68 | if isinstance(dict_or_iterable, dict): | ||
3378 | 69 | self.update(dict_or_iterable) | ||
3379 | 70 | elif isinstance(dict_or_iterable, collections.Iterable): | ||
3380 | 71 | for k, v in dict_or_iterable: | ||
3381 | 72 | self[k] = v | ||
3382 | 73 | if kwargs: | ||
3383 | 74 | self.update(kwargs) | ||
3384 | 75 | |||
3385 | 76 | def __setitem__(self, key, value): | ||
3386 | 77 | key = key.split('.') | ||
3387 | 78 | o = self | ||
3388 | 79 | for part in key[:-1]: | ||
3389 | 80 | o = o.setdefault(part, self.__class__()) | ||
3390 | 81 | dict.__setitem__(o, key[-1], value) | ||
3391 | 82 | |||
3392 | 83 | def __getitem__(self, key): | ||
3393 | 84 | o = self | ||
3394 | 85 | if '.' in key: | ||
3395 | 86 | parts = key.split('.') | ||
3396 | 87 | key = parts[-1] | ||
3397 | 88 | for part in parts[:-1]: | ||
3398 | 89 | o = o[part] | ||
3399 | 90 | |||
3400 | 91 | return dict.__getitem__(o, key) | ||
3401 | 92 | |||
3402 | 93 | def __getattr__(self, key): | ||
3403 | 94 | try: | ||
3404 | 95 | return self[key] | ||
3405 | 96 | except KeyError: | ||
3406 | 97 | raise AttributeError(key) | ||
3407 | 98 | |||
3408 | 99 | def get(self, key, default=None): | ||
3409 | 100 | try: | ||
3410 | 101 | return self[key] | ||
3411 | 102 | except KeyError: | ||
3412 | 103 | return default | ||
3413 | 104 | |||
3414 | 105 | def update(self, other): | ||
3415 | 106 | deepmerge(self, other) | ||
3416 | 107 | |||
3417 | 108 | |||
3418 | 109 | class ProcessResult(object): | ||
3419 | 110 | def __init__(self, command, exit_code, stdout, stderr): | ||
3420 | 111 | self.command = command | ||
3421 | 112 | self.exit_code = exit_code | ||
3422 | 113 | self.stdout = stdout | ||
3423 | 114 | self.stderr = stderr | ||
3424 | 115 | |||
3425 | 116 | def __repr__(self): | ||
3426 | 117 | return '<ProcessResult "%s" result %s>' % (self.cmd, self.exit_code) | ||
3427 | 118 | |||
3428 | 119 | @property | ||
3429 | 120 | def cmd(self): | ||
3430 | 121 | return ' '.join(self.command) | ||
3431 | 122 | |||
3432 | 123 | @property | ||
3433 | 124 | def output(self): | ||
3434 | 125 | result = '' | ||
3435 | 126 | if self.stdout: | ||
3436 | 127 | result += self.stdout | ||
3437 | 128 | if self.stderr: | ||
3438 | 129 | result += self.stderr | ||
3439 | 130 | return result.strip() | ||
3440 | 131 | |||
3441 | 132 | @property | ||
3442 | 133 | def json(self): | ||
3443 | 134 | if self.stdout: | ||
3444 | 135 | return json.loads(self.stdout) | ||
3445 | 136 | return None | ||
3446 | 137 | |||
3447 | 138 | def __eq__(self, other): | ||
3448 | 139 | return self.exit_code == other | ||
3449 | 140 | |||
3450 | 141 | def __bool__(self): | ||
3451 | 142 | return self.exit_code == 0 | ||
3452 | 143 | |||
3453 | 144 | __nonzero__ = __bool__ | ||
3454 | 145 | |||
3455 | 146 | def throw_on_error(self): | ||
3456 | 147 | if not bool(self): | ||
3457 | 148 | raise subprocess.CalledProcessError( | ||
3458 | 149 | self.exit_code, self.command, output=self.output) | ||
3459 | 150 | |||
3460 | 151 | |||
3461 | 152 | class Process(object): | ||
3462 | 153 | def __init__(self, command=None, throw=False, log=log, **kwargs): | ||
3463 | 154 | if isinstance(command, str): | ||
3464 | 155 | command = (command, ) | ||
3465 | 156 | self.command = command | ||
3466 | 157 | self._throw_on_error = False | ||
3467 | 158 | self.log = log | ||
3468 | 159 | self._kw = kwargs | ||
3469 | 160 | |||
3470 | 161 | def __repr__(self): | ||
3471 | 162 | return "<Command %s>" % (self.command, ) | ||
3472 | 163 | |||
3473 | 164 | def throw_on_error(self, throw=True): | ||
3474 | 165 | self._throw_on_error = throw | ||
3475 | 166 | return self | ||
3476 | 167 | |||
3477 | 168 | def __call__(self, *args, **kw): | ||
3478 | 169 | kwargs = dict(stdout=subprocess.PIPE, | ||
3479 | 170 | stderr=subprocess.STDOUT) | ||
3480 | 171 | if self._kw: | ||
3481 | 172 | kwargs.update(self._kw) | ||
3482 | 173 | kwargs.update(kw) | ||
3483 | 174 | if self.command: | ||
3484 | 175 | all_args = self.command + args | ||
3485 | 176 | else: | ||
3486 | 177 | all_args = args | ||
3487 | 178 | if 'env' not in kwargs: | ||
3488 | 179 | kwargs['env'] = os.environ | ||
3489 | 180 | |||
3490 | 181 | p = subprocess.Popen(all_args, **kwargs) | ||
3491 | 182 | stdout, stderr = p.communicate() | ||
3492 | 183 | self.log.debug(stdout) | ||
3493 | 184 | stdout = stdout.strip() | ||
3494 | 185 | if stderr is not None: | ||
3495 | 186 | stderr = stderr.strip() | ||
3496 | 187 | self.log.debug(stderr) | ||
3497 | 188 | exit_code = p.poll() | ||
3498 | 189 | result = ProcessResult(all_args, exit_code, stdout, stderr) | ||
3499 | 190 | self.log.debug("process: %s (%d)", result.cmd, result.exit_code) | ||
3500 | 191 | if self._throw_on_error: | ||
3501 | 192 | result.throw_on_error() | ||
3502 | 193 | return result | ||
3503 | 194 | |||
3504 | 195 | command = Process | ||
3505 | 196 | |||
3506 | 197 | |||
3507 | 198 | class Commander(object): | ||
3508 | 199 | def __init__(self, log=log): | ||
3509 | 200 | self.log = log | ||
3510 | 201 | |||
3511 | 202 | def set_log(self, logger): | ||
3512 | 203 | self.log = logger | ||
3513 | 204 | |||
3514 | 205 | def __getattr__(self, key): | ||
3515 | 206 | return command((key,), log=self.log) | ||
3516 | 207 | |||
3517 | 208 | def check(self, *args, **kwargs): | ||
3518 | 209 | kwargs.update({'log': self.log}) | ||
3519 | 210 | return command(command=args, **kwargs).throw_on_error() | ||
3520 | 211 | |||
3521 | 212 | def __call__(self, *args, **kwargs): | ||
3522 | 213 | kwargs.update({'log': self.log}) | ||
3523 | 214 | return command(command=args, shell=True, **kwargs) | ||
3524 | 215 | |||
3525 | 216 | |||
3526 | 217 | sh = Commander() | ||
3527 | 218 | dig = Process(('dig', '+short')) | ||
3528 | 219 | api_endpoints = Process(('juju', 'api-endpoints')) | ||
3529 | 220 | |||
3530 | 221 | |||
3531 | 222 | def wait_for(timeout, interval, *callbacks, **kwargs): | ||
3532 | 223 | """ | ||
3533 | 224 | Repeatedly try callbacks until all return True | ||
3534 | 225 | |||
3535 | 226 | This will wait interval seconds between attempts and will error out | ||
3536 | 227 | after timeout has been exceeded. | ||
3537 | 228 | |||
3538 | 229 | Callbacks will be called with no arguments. | ||
3539 | 230 | |||
3540 | 231 | Setting timeout to zero will loop until cancelled, power runs out, | ||
3541 | 232 | hardware fails, or the heat death of the universe. | ||
3542 | 233 | """ | ||
3543 | 234 | start = time.time() | ||
3544 | 235 | if timeout: | ||
3545 | 236 | end = start + timeout | ||
3546 | 237 | else: | ||
3547 | 238 | end = 0 | ||
3548 | 239 | |||
3549 | 240 | bar = kwargs.get('bar', None) | ||
3550 | 241 | message = kwargs.get('message', None) | ||
3551 | 242 | once = 1 | ||
3552 | 243 | while True: | ||
3553 | 244 | passes = True | ||
3554 | 245 | if end > 0 and time.time() > end: | ||
3555 | 246 | raise OSError("Timeout exceeded in wait_for") | ||
3556 | 247 | if bar: | ||
3557 | 248 | bar.next(once, message=message) | ||
3558 | 249 | if once == 1: | ||
3559 | 250 | once = 0 | ||
3560 | 251 | if int(time.time()) % interval == 0: | ||
3561 | 252 | for callback in callbacks: | ||
3562 | 253 | result = callback() | ||
3563 | 254 | passes = passes & bool(result) | ||
3564 | 255 | if passes is False: | ||
3565 | 256 | break | ||
3566 | 257 | if passes is True: | ||
3567 | 258 | break | ||
3568 | 259 | time.sleep(1) | ||
3569 | 260 | |||
3570 | 261 | |||
3571 | 262 | def until(*callbacks, **kwargs): | ||
3572 | 263 | return wait_for(0, 20, *callbacks, **kwargs) | ||
3573 | 264 | |||
3574 | 265 | |||
3575 | 266 | def retry(attempts, *callbacks, **kwargs): | ||
3576 | 267 | """ | ||
3577 | 268 | Repeatedly try callbacks a fixed number of times or until all return True | ||
3578 | 269 | """ | ||
3579 | 270 | for attempt in xrange(attempts): | ||
3580 | 271 | if 'bar' in kwargs: | ||
3581 | 272 | kwargs['bar'].next(attempt == 0, message=kwargs.get('message')) | ||
3582 | 273 | for callback in callbacks: | ||
3583 | 274 | if not callback(): | ||
3584 | 275 | break | ||
3585 | 276 | else: | ||
3586 | 277 | break | ||
3587 | 278 | else: | ||
3588 | 279 | raise OSError("Retry attempts exceeded") | ||
3589 | 280 | return True | ||
3590 | 281 | |||
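A short usage sketch for the polling helpers (illustrative only; the callbacks are plain zero-argument callables that return truthy values when satisfied):

    import os
    from charmtools import utils

    marker = "/tmp/ready"    # hypothetical sentinel file

    # Try up to 5 times; raises OSError("Retry attempts exceeded") if it never passes.
    utils.retry(5, lambda: os.path.exists(marker))

    # Poll every 2 seconds for at most 60 seconds.
    utils.wait_for(60, 2, lambda: os.path.exists(marker))

    # until() is wait_for(0, 20, ...): loop indefinitely on a 20 second interval.
    utils.until(lambda: os.path.exists(marker))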
3591 | 282 | |||
3592 | 283 | def which(program): | ||
3593 | 284 | def is_exe(fpath): | ||
3594 | 285 | return os.path.isfile(fpath) and os.access(fpath, os.X_OK) | ||
3595 | 286 | |||
3596 | 287 | fpath, fname = os.path.split(program) | ||
3597 | 288 | if fpath: | ||
3598 | 289 | if is_exe(program): | ||
3599 | 290 | return program | ||
3600 | 291 | else: | ||
3601 | 292 | for fpath in os.environ["PATH"].split(os.pathsep): | ||
3602 | 293 | fpath = fpath.strip('"') | ||
3603 | 294 | exe_file = os.path.join(fpath, program) | ||
3604 | 295 | if is_exe(exe_file): | ||
3605 | 296 | return exe_file | ||
3606 | 297 | return None | ||
3607 | 298 | |||
3608 | 299 | |||
3609 | 300 | def load_class(dpath, workingdir=None): | ||
3610 | 301 | # we expect the last element of the path | ||
3611 | 302 | if not workingdir: | ||
3612 | 303 | workingdir = os.getcwd() | ||
3613 | 304 | with cd(workingdir): | ||
3614 | 305 | modpath, classname = dpath.rsplit('.', 1) | ||
3615 | 306 | modpath = path(modpath.replace(".", "/")) | ||
3616 | 307 | if not modpath.exists(): | ||
3617 | 308 | modpath += ".py" | ||
3618 | 309 | if not modpath.exists(): | ||
3619 | 310 | raise OSError("Unable to load {} from {}".format( | ||
3620 | 311 | dpath, workingdir)) | ||
3621 | 312 | namespace = {} | ||
3622 | 313 | execfile(modpath, globals(), namespace) | ||
3623 | 314 | klass = namespace.get(classname) | ||
3624 | 315 | if klass is None: | ||
3625 | 316 | raise ImportError("Unable to load class {} at {}".format( | ||
3626 | 317 | classname, dpath)) | ||
3627 | 318 | return klass | ||
3628 | 319 | |||
3629 | 320 | |||
3630 | 321 | def walk(pathobj, fn, matcher=None, kind=None, **kwargs): | ||
3631 | 322 | """walk pathobj calling fn on each matched entry yielding each | ||
3632 | 323 | result. If kind is 'files' or 'dir' only that type of entry will | ||
3633 | 324 | be walked. matcher is an optional function returning bool indicating | ||
3634 | 325 | if the entry should be processed. | ||
3635 | 326 | """ | ||
3636 | 327 | p = path(pathobj) | ||
3637 | 328 | walker = p.walk | ||
3638 | 329 | if kind == "files": | ||
3639 | 330 | walker = p.walkfiles | ||
3640 | 331 | elif kind == "dir": | ||
3641 | 332 | walker = p.walkdir | ||
3642 | 333 | |||
3643 | 334 | for entry in walker(): | ||
3644 | 335 | relpath = entry.relpath(pathobj) | ||
3645 | 336 | if matcher and not matcher(relpath): | ||
3646 | 337 | continue | ||
3647 | 338 | yield (entry, fn(entry, **kwargs)) | ||
3648 | 339 | |||
3649 | 340 | |||
3650 | 341 | def ignore_matcher(ignores=[]): | ||
3651 | 342 | spec = pathspec.PathSpec.from_lines(pathspec.GitIgnorePattern, ignores) | ||
3652 | 343 | |||
3653 | 344 | def matcher(entity): | ||
3654 | 345 | return entity not in spec.match_files((entity,)) | ||
3655 | 346 | return matcher | ||
3656 | 347 | |||
3657 | 348 | |||
3658 | 349 | def sign(pathobj): | ||
3659 | 350 | p = path(pathobj) | ||
3660 | 351 | if not p.isfile(): | ||
3661 | 352 | return None | ||
3662 | 353 | return hashlib.sha256(p.bytes()).hexdigest() | ||
3663 | 354 | |||
3664 | 355 | |||
3665 | 356 | def delta_signatures(manifest_filename, ignorer=None): | ||
3666 | 357 | md = path(manifest_filename) | ||
3667 | 358 | repo = md.normpath().dirname() | ||
3668 | 359 | |||
3669 | 360 | expected = json.load(md.open()) | ||
3670 | 361 | current = {} | ||
3671 | 362 | for rel, sig in walk(repo, sign): | ||
3672 | 363 | rel = rel.relpath(repo) | ||
3673 | 364 | current[rel] = sig | ||
3674 | 365 | add, change, delete = set(), set(), set() | ||
3675 | 366 | |||
3676 | 367 | for p, s in current.items(): | ||
3677 | 368 | fp = repo / p | ||
3678 | 369 | if not fp.isfile(): | ||
3679 | 370 | continue | ||
3680 | 371 | if ignorer and not ignorer(p): | ||
3681 | 372 | continue | ||
3682 | 373 | |||
3683 | 374 | if p not in expected["signatures"]: | ||
3684 | 375 | add.add(p) | ||
3685 | 376 | continue | ||
3686 | 377 | # layer, kind, sig | ||
3687 | 378 | # don't include items generated only for the last layer | ||
3688 | 379 | if expected["signatures"][p][0] == "composer": | ||
3689 | 380 | continue | ||
3690 | 381 | if expected["signatures"][p][2] != s: | ||
3691 | 382 | change.add(p) | ||
3692 | 383 | |||
3693 | 384 | for p, d in expected["signatures"].items(): | ||
3694 | 385 | if p not in current: | ||
3695 | 386 | delete.add(path(p)) | ||
3696 | 387 | return add, change, delete | ||
3697 | 388 | |||
3698 | 389 | |||
3699 | 390 | class ColoredFormatter(logging.Formatter): | ||
3700 | 391 | |||
3701 | 392 | def __init__(self, terminal, *args, **kwargs): | ||
3702 | 393 | super(ColoredFormatter, self).__init__(*args, **kwargs) | ||
3703 | 394 | self._terminal = terminal | ||
3704 | 395 | |||
3705 | 396 | def format(self, record): | ||
3706 | 397 | output = super(ColoredFormatter, self).format(record) | ||
3707 | 398 | if record.levelno >= logging.CRITICAL: | ||
3708 | 399 | line_color = self._terminal.bold_yellow_on_red | ||
3709 | 400 | elif record.levelno >= logging.ERROR: | ||
3710 | 401 | line_color = self._terminal.red | ||
3711 | 402 | elif record.levelno >= logging.WARNING: | ||
3712 | 403 | line_color = self._terminal.yellow | ||
3713 | 404 | elif record.levelno >= logging.INFO: | ||
3714 | 405 | line_color = self._terminal.green | ||
3715 | 406 | else: | ||
3716 | 407 | line_color = self._terminal.cyan | ||
3717 | 408 | return line_color(output) | ||
3718 | 409 | |||
3719 | 410 | |||
3720 | 411 | class TermWriter(object): | ||
3721 | 412 | def __init__(self, fp=None, term=None): | ||
3722 | 413 | if fp is None: | ||
3723 | 414 | fp = sys.stdout | ||
3724 | 415 | self.fp = fp | ||
3725 | 416 | if term is None: | ||
3726 | 417 | term = blessings.Terminal() | ||
3727 | 418 | self.term = term | ||
3728 | 419 | |||
3729 | 420 | def __getattr__(self, key): | ||
3730 | 421 | return getattr(self.term, key) | ||
3731 | 422 | |||
3732 | 423 | def write(self, msg, *args, **kwargs): | ||
3733 | 424 | if 't' in kwargs: | ||
3734 | 425 | raise ValueError("Using reserved token 't' in TermWriter.write") | ||
3735 | 426 | kwargs['t'] = self.term | ||
3736 | 427 | self.fp.write(msg.format(*args, **kwargs)) | ||
3737 | 428 | |||
3738 | 429 | |||
3739 | 430 | class _O(dict): | ||
3740 | 431 | def __getattr__(self, k): | ||
3741 | 432 | return self[k] | ||
3742 | 433 | |||
3743 | 434 | REACTIVE_PATTERNS = [ | ||
3744 | 435 | re.compile("\s*@when"), | ||
3745 | 436 | re.compile(".set_state\(") | ||
3746 | 437 | ] | ||
3747 | 438 | |||
3748 | 439 | |||
3749 | 440 | def delta_python(orig, dest, patterns=REACTIVE_PATTERNS, context=2): | ||
3750 | 441 | """Delta two python files looking for certain patterns""" | ||
3751 | 442 | if isinstance(orig, path): | ||
3752 | 443 | od = orig.text() | ||
3753 | 444 | elif hasattr(orig, 'read'): | ||
3754 | 445 | od = orig.read() | ||
3755 | 446 | else: | ||
3756 | 447 | raise TypeError("Expected path() or file(), got %s" % type(orig)) | ||
3757 | 448 | if isinstance(dest, path): | ||
3758 | 449 | dd = dest.text() | ||
3759 | 450 | elif hasattr(dest, 'read'): | ||
3760 | 451 | dd = dest.read() | ||
3761 | 452 | else: | ||
3762 | 453 | raise TypeError("Expected path() or file(), got %s" % type(dest)) | ||
3763 | 454 | |||
3764 | 455 | differ = diff_match_patch() | ||
3765 | 456 | linect = 0 | ||
3766 | 457 | lastMatch = None | ||
3767 | 458 | for res in differ.diff_main(od, dd): | ||
3768 | 459 | if res[0] == diff_match_patch.DIFF_EQUAL: | ||
3769 | 460 | linect += res[1].count('\n') | ||
3770 | 461 | lastMatch = res[:] | ||
3771 | 462 | continue | ||
3772 | 463 | elif res[0] == diff_match_patch.DIFF_INSERT: | ||
3773 | 464 | linect += res[1].count('\n') | ||
3774 | 465 | else: | ||
3775 | 466 | linect -= res[1].count('\n') | ||
3776 | 467 | |||
3777 | 468 | for p in patterns: | ||
3778 | 469 | if p.search(lastMatch[1]): | ||
3779 | 470 | yield [linect, lastMatch, res] | ||
3780 | 471 | break | ||
3781 | 472 | |||
3782 | 473 | |||
3783 | 474 | def delta_python_dump(orig, dest, patterns=REACTIVE_PATTERNS, | ||
3784 | 475 | context=2, term=None, | ||
3785 | 476 | from_name=None, to_name=None): | ||
3786 | 477 | if term is None: | ||
3787 | 478 | term = TermWriter() | ||
3788 | 479 | |||
3789 | 480 | def norm_sources(orig, dest): | ||
3790 | 481 | if from_name: | ||
3791 | 482 | oname = from_name | ||
3792 | 483 | else: | ||
3793 | 484 | oname = orig | ||
3794 | 485 | if to_name: | ||
3795 | 486 | dname = to_name | ||
3796 | 487 | else: | ||
3797 | 488 | dname = dest | ||
3798 | 489 | return _O({'orig_name': oname, 'dest_name': dname}) | ||
3799 | 490 | |||
3800 | 491 | def prefix_lines(lines, lineno): | ||
3801 | 492 | if isinstance(lines, str): | ||
3802 | 493 | lines = lines.splitlines() | ||
3803 | 494 | for i, l in enumerate(lines): | ||
3804 | 495 | lines[i] = "%-5d| %s" % (lineno + i, l) | ||
3805 | 496 | return "\n".join(lines) | ||
3806 | 497 | |||
3807 | 498 | i = 0 | ||
3808 | 499 | for lineno, last, current in delta_python(orig, dest, patterns, context): | ||
3809 | 500 | # pull enough context | ||
3810 | 501 | if last: | ||
3811 | 502 | context_lines = last[1].splitlines()[-context:] | ||
3812 | 503 | message = norm_sources(orig, dest) | ||
3813 | 504 | message['context'] = prefix_lines(context_lines, lineno - context) | ||
3814 | 505 | message['lineno'] = lineno | ||
3815 | 506 | message['delta'] = current[1] | ||
3816 | 507 | s = {diff_match_patch.DIFF_EQUAL: term.normal, | ||
3817 | 508 | diff_match_patch.DIFF_INSERT: term.green, | ||
3818 | 509 | diff_match_patch.DIFF_DELETE: term.red}[current[0]] | ||
3819 | 510 | message['status_color'] = s | ||
3820 | 511 | # output message | ||
3821 | 512 | term.write("{t.bold}{m.orig_name}{t.normal} --> " | ||
3822 | 513 | "{t.bold}{m.dest_name}{t.normal}:\n", | ||
3823 | 514 | m=message) | ||
3824 | 515 | term.write("{m.context}{m.status_color}{m.delta}{t.normal}\n", | ||
3825 | 516 | m=message) | ||
3826 | 517 | i += 1 | ||
3827 | 518 | return i == 0 | ||
3828 | 0 | 519 | ||
3829 | === added file 'doc/source/compose-intro.md' | |||
3830 | --- doc/source/compose-intro.md 1970-01-01 00:00:00 +0000 | |||
3831 | +++ doc/source/compose-intro.md 2015-08-31 19:32:56 +0000 | |||
3832 | @@ -0,0 +1,18 @@ | |||
3833 | 1 | charm compose/refresh combines various included layers to produce an output | ||
3834 | 2 | charm. These layers can be maintained and updated separately and then the | ||
3835 | 3 | refresh process can be used to regenerate the charm. | ||
3836 | 4 | |||
3837 | 5 | COMPOSER_PATH is a ':' delimited path list used to resolve local include matches. | ||
3838 | 6 | INTERFACE_PATH is the directory from which interfaces will be resolved. | ||
3839 | 7 | |||
3840 | 8 | Examples: | ||
3841 | 9 | charm compose -o /tmp/out trusty/mycharm | ||
3842 | 10 | |||
3843 | 11 | Will generate /tmp/out/trusty/mycharm with all the includes specified. | ||
3844 | 12 | |||
3845 | 13 | WORKFLOW | ||
3846 | 14 | ======== | ||
3847 | 15 | |||
3848 | 16 | Typically you'll make changes in the layer owning the file(s) in question | ||
3849 | 17 | and then recompose the charm and deploy/upgrade-charm the result. You won't | ||
3850 | 18 | want to edit the generated charm directly. | ||
3851 | 0 | 19 | ||
3852 | === added file 'doc/source/composer.md' | |||
3853 | --- doc/source/composer.md 1970-01-01 00:00:00 +0000 | |||
3854 | +++ doc/source/composer.md 2015-08-31 19:32:56 +0000 | |||
3855 | @@ -0,0 +1,123 @@ | |||
3856 | 1 | Juju Charm Composition | ||
3857 | 2 | ====================== | ||
3858 | 3 | |||
3859 | 4 | Status | *Alpha* | ||
3860 | 5 | ------- ------- | ||
3861 | 6 | |||
3862 | 7 | This is a prototype designed to flesh out requirements around Charm | ||
3863 | 8 | Composition. Today it's very common to fork charms for minor changes or to have | ||
3864 | 9 | to use subordinate charms to take advantage of frameworks where you need to | ||
3865 | 10 | deploy a custom workload to an existing runtime. With charm composition you | ||
3866 | 11 | should be able to include from a charm that provides the runtime (or just some | ||
3867 | 12 | well-contained feature set) and maintain your delta as a 'layer' that gets | ||
3868 | 13 | composed with its base to produce a new charm. | ||
3869 | 14 | |||
3870 | 15 | This process should be runnable repeatedly allowing charms to be regenerated. | ||
3871 | 16 | |||
3872 | 17 | |||
3873 | 18 | This work is currently feature-incomplete but does allow the generation of | ||
3874 | 19 | simple charms and useful basic composition. It is my hope that this will | ||
3875 | 20 | encourage discussion of the feature set needed to one day have charm | ||
3876 | 21 | composition supported natively in juju-core. | ||
3877 | 22 | |||
3878 | 23 | |||
3879 | 24 | Today the system can be run as follows: | ||
3880 | 25 | |||
3881 | 26 | ./juju_compose.py -o <output_repo> <charm to build from> | ||
3882 | 27 | |||
3883 | 28 | So you might use the included (very unrealistic) test case like so: | ||
3884 | 29 | |||
3885 | 30 | ./juju_compose -o out -n foo tests/trusty/tester | ||
3886 | 31 | |||
3887 | 32 | Running this should produce a charm in out/trusty/foo which is composed | ||
3888 | 33 | according to the composer.yaml file in tests/trusty/tester. While this isn't | ||
3889 | 34 | documented yet it shows some of the basic features of diverting hooks (for | ||
3890 | 35 | pre/post hooks support), replacing files, merging metadata.yaml changes, etc. | ||
3891 | 36 | |||
3892 | 37 | It should be enough to give you an idea of how it works. In order for this example | ||
3893 | 38 | to run, you'll need to pip install bundletester, as it shares some code with that | ||
3894 | 39 | project. | ||
3895 | 40 | |||
3896 | 41 | Theory | ||
3897 | 42 | ====== | ||
3898 | 43 | |||
3899 | 44 | A generated charm is composed of layers. The generator acts almost like a | ||
3900 | 45 | compiler taking the input from each layer and producing an output file in the | ||
3901 | 46 | resultant charm. | ||
3902 | 47 | |||
3903 | 48 | The generator keeps track of which layer owns each file and allows layers to | ||
3904 | 49 | update files they own should the charm be refreshed later. | ||
3905 | 50 | |||
3906 | 51 | The generated charm itself should be treated as immutable. The top layer that | ||
3907 | 52 | was used to generate it is where user level modifications should live. | ||
3908 | 53 | |||
3909 | 54 | |||
3910 | 55 | Setting Up your Repo | ||
3911 | 56 | ==================== | ||
3912 | 57 | This currently allows for two new ENV variables when run: | ||
3913 | 58 | COMPOSER_PATH: a ':' separated list of JUJU_REPOSITORY paths that should be searched for includes. | ||
3914 | 59 | INTERFACE_PATH: a ':' separated list of paths to resolve interface:_name_ includes from. | ||
3915 | 60 | |||
3916 | 61 | JUJU_REPOSITORY entries take the usual format *series*/*charm*. | ||
3917 | 62 | INTERFACE repos take the format *interface_name*, where interface_name is | ||
3918 | 63 | the name as it appears in metadata.yaml. | ||
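For example, the test suite in this branch points both variables at local fixture directories before composing; done from Python that looks roughly like the following (the paths below are placeholders):

    import os

    # ':'-separated search paths; these directories are placeholders.
    os.environ["COMPOSER_PATH"] = "/home/user/layers:/home/user/charms"
    os.environ["INTERFACE_PATH"] = "/home/user/interfaces"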
3919 | 64 | |||
3920 | 65 | Composition Types | ||
3921 | 66 | ================= | ||
3922 | 67 | |||
3923 | 68 | Each file in each layer gets matched by a single Tactic. Tactics implement how | ||
3924 | 69 | the data in a file moves from one layer to the next (and finally to the target | ||
3925 | 70 | charm). By default this will be a simple copy, but in the case of certain files | ||
3926 | 71 | (mostly known YAML files like metadata.yaml and config.yaml) each layer is | ||
3927 | 72 | combined with the previous layers before being written. | ||
3928 | 73 | |||
3929 | 74 | Normally the default tactics are fine, but you have the ability in the | ||
3930 | 75 | composer.yaml to list a set of Tactic classes that will be checked before the | ||
3931 | 76 | default and control how data moves from one layer to the next. | ||
3932 | 77 | |||
3933 | 78 | |||
3934 | 79 | composer.yaml | ||
3935 | 80 | ============= | ||
3936 | 81 | Each layer used to build a charm can have a composer.yaml file. The top layer | ||
3937 | 82 | (the one actually invoked from the command line) must. These tell the generator what to do, | ||
3938 | 83 | ranging from which base layers to include, to which interfaces. They also allow for | ||
3939 | 84 | the inclusion of specialized directives for processing some types of files. | ||
3940 | 85 | |||
3941 | 86 | Keys: | ||
3942 | 87 | includes: ["trusty/mysql", "interface:mysql"] | ||
3943 | 88 | tactics: [ dottedpath.toTacticClass, ] | ||
3944 | 89 | config: | ||
3945 | 90 | deletes: | ||
3946 | 91 | - key names | ||
3947 | 92 | metadata: | ||
3948 | 93 | deletes: | ||
3949 | 94 | - key names | ||
3950 | 95 | |||
3951 | 96 | |||
3952 | 97 | Includes is a list of one or more layers and interfaces that should be | ||
3953 | 98 | composited. Those layers may themselves have other includes and/or | ||
3954 | 99 | interfaces. | ||
3955 | 100 | |||
3956 | 101 | Tactics is a list of Tactics to be loaded. See juju_compose.tactics.Tactics for | ||
3957 | 102 | the default interface. You'll typically need to implement at least a trigger() method | ||
3958 | 103 | and a __call__() method. | ||
3959 | 104 | |||
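As a rough sketch of what such a class might look like (the trigger()/__call__() contract comes from the text above; the base-class import path and the entity/target attribute names are assumptions, not verified against this branch):

    from charmtools.compose import tactics

    class UppercaseReadmeTactic(tactics.Tactic):
        """Hypothetical tactic: copy README.md while upper-casing its content."""

        @classmethod
        def trigger(cls, relpath):
            # Claim only the README.md entry for this tactic.
            return relpath == "README.md"

        def __call__(self):
            # `entity` (source file) and `target_file` (destination) are assumed
            # attribute names on the base class.
            self.target_file.write_text(self.entity.text().upper())

Such a class would then be referenced from the tactics key as a dotted path, e.g. tactics: ["mymodule.UppercaseReadmeTactic"].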
3960 | 105 | config and metadata take optional lists of keys to remove from config.yaml and | ||
3961 | 106 | metadata.yaml when generating their data. This allows charms to, for | ||
3962 | 107 | example, narrow what they expose to clients. | ||
3963 | 108 | |||
3964 | 109 | |||
3965 | 110 | Inspect | ||
3966 | 111 | ======= | ||
3967 | 112 | |||
3968 | 113 | If you've already generated a charm you can see which layers own which files by | ||
3969 | 114 | using the included **charm inspect [charmdir]** command. This should render a | ||
3970 | 115 | tree of the files in the color of each layer. Each layer's assigned color is | ||
3971 | 116 | presented in a legend at the top of the output. | ||
3972 | 117 | |||
3973 | 118 | TODO: | ||
3974 | 119 | - lint about methods in base layer not provided/extended in lower | ||
3975 | 120 | layers | ||
3976 | 121 | |||
3977 | 122 | |||
3978 | 123 | |||
3979 | 0 | 124 | ||
3980 | === removed file 'ez_setup.py' | |||
3981 | --- ez_setup.py 2013-02-25 21:54:26 +0000 | |||
3982 | +++ ez_setup.py 1970-01-01 00:00:00 +0000 | |||
3983 | @@ -1,272 +0,0 @@ | |||
3984 | 1 | #!python | ||
3985 | 2 | |||
3986 | 3 | # NOTE TO LAUNCHPAD DEVELOPERS: This is a bootstrapping file from the | ||
3987 | 4 | # setuptools project. It is imported by our setup.py. | ||
3988 | 5 | |||
3989 | 6 | """Bootstrap setuptools installation | ||
3990 | 7 | |||
3991 | 8 | If you want to use setuptools in your package's setup.py, just include this | ||
3992 | 9 | file in the same directory with it, and add this to the top of your setup.py:: | ||
3993 | 10 | |||
3994 | 11 | from ez_setup import use_setuptools | ||
3995 | 12 | use_setuptools() | ||
3996 | 13 | |||
3997 | 14 | If you want to require a specific version of setuptools, set a download | ||
3998 | 15 | mirror, or use an alternate download directory, you can do so by supplying | ||
3999 | 16 | the appropriate options to ``use_setuptools()``. | ||
4000 | 17 | |||
4001 | 18 | This file can also be run as a script to install or upgrade setuptools. | ||
4002 | 19 | """ | ||
4003 | 20 | import sys | ||
4004 | 21 | DEFAULT_VERSION = "0.6c11" | ||
4005 | 22 | DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" \ | ||
4006 | 23 | % sys.version[:3] | ||
4007 | 24 | |||
4008 | 25 | md5_data = { | ||
4009 | 26 | 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', | ||
4010 | 27 | 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', | ||
4011 | 28 | 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', | ||
4012 | 29 | 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', | ||
4013 | 30 | 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', | ||
4014 | 31 | 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', | ||
4015 | 32 | 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', | ||
4016 | 33 | 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', | ||
4017 | 34 | 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', | ||
4018 | 35 | 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', | ||
4019 | 36 | 'setuptools-0.6c10-py2.3.egg': 'ce1e2ab5d3a0256456d9fc13800a7090', | ||
4020 | 37 | 'setuptools-0.6c10-py2.4.egg': '57d6d9d6e9b80772c59a53a8433a5dd4', | ||
4021 | 38 | 'setuptools-0.6c10-py2.5.egg': 'de46ac8b1c97c895572e5e8596aeb8c7', | ||
4022 | 39 | 'setuptools-0.6c10-py2.6.egg': '58ea40aef06da02ce641495523a0b7f5', | ||
4023 | 40 | 'setuptools-0.6c11-py2.3.egg': '2baeac6e13d414a9d28e7ba5b5a596de', | ||
4024 | 41 | 'setuptools-0.6c11-py2.4.egg': 'bd639f9b0eac4c42497034dec2ec0c2b', | ||
4025 | 42 | 'setuptools-0.6c11-py2.5.egg': '64c94f3bf7a72a13ec83e0b24f2749b2', | ||
4026 | 43 | 'setuptools-0.6c11-py2.6.egg': 'bfa92100bd772d5a213eedd356d64086', | ||
4027 | 44 | 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', | ||
4028 | 45 | 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', | ||
4029 | 46 | 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', | ||
4030 | 47 | 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', | ||
4031 | 48 | 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', | ||
4032 | 49 | 'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f', | ||
4033 | 50 | 'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2', | ||
4034 | 51 | 'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc', | ||
4035 | 52 | 'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167', | ||
4036 | 53 | 'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64', | ||
4037 | 54 | 'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d', | ||
4038 | 55 | 'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20', | ||
4039 | 56 | 'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab', | ||
4040 | 57 | 'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53', | ||
4041 | 58 | 'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2', | ||
4042 | 59 | 'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e', | ||
4043 | 60 | 'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372', | ||
4044 | 61 | 'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902', | ||
4045 | 62 | 'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de', | ||
4046 | 63 | 'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b', | ||
4047 | 64 | 'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03', | ||
4048 | 65 | 'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a', | ||
4049 | 66 | 'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6', | ||
4050 | 67 | 'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a', | ||
4051 | 68 | } | ||
4052 | 69 | |||
4053 | 70 | import os | ||
4054 | 71 | import sys | ||
4055 | 72 | |||
4056 | 73 | try: | ||
4057 | 74 | from hashlib import md5 | ||
4058 | 75 | except ImportError: | ||
4059 | 76 | from md5 import md5 | ||
4060 | 77 | |||
4061 | 78 | |||
4062 | 79 | def _validate_md5(egg_name, data): | ||
4063 | 80 | if egg_name in md5_data: | ||
4064 | 81 | digest = md5(data).hexdigest() | ||
4065 | 82 | if digest != md5_data[egg_name]: | ||
4066 | 83 | print >>sys.stderr, ( | ||
4067 | 84 | "md5 validation of %s failed! (Possible download problem?)" | ||
4068 | 85 | % egg_name | ||
4069 | 86 | ) | ||
4070 | 87 | sys.exit(2) | ||
4071 | 88 | return data | ||
4072 | 89 | |||
4073 | 90 | |||
4074 | 91 | def use_setuptools( | ||
4075 | 92 | version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, | ||
4076 | 93 | download_delay=15 | ||
4077 | 94 | ): | ||
4078 | 95 | """Automatically find/download setuptools and make it available on sys.path | ||
4079 | 96 | |||
4080 | 97 | `version` should be a valid setuptools version number that is available | ||
4081 | 98 | as an egg for download under the `download_base` URL (which should end with | ||
4082 | 99 | a '/'). `to_dir` is the directory where setuptools will be downloaded, if | ||
4083 | 100 | it is not already available. If `download_delay` is specified, it should | ||
4084 | 101 | be the number of seconds that will be paused before initiating a download, | ||
4085 | 102 | should one be required. If an older version of setuptools is installed, | ||
4086 | 103 | this routine will print a message to ``sys.stderr`` and raise SystemExit in | ||
4087 | 104 | an attempt to abort the calling script. | ||
4088 | 105 | """ | ||
4089 | 106 | was_imported = 'pkg_resources' in sys.modules \ | ||
4090 | 107 | or 'setuptools' in sys.modules | ||
4091 | 108 | |||
4092 | 109 | def do_download(): | ||
4093 | 110 | egg = download_setuptools(version, download_base, to_dir, | ||
4094 | 111 | download_delay) | ||
4095 | 112 | sys.path.insert(0, egg) | ||
4096 | 113 | import setuptools | ||
4097 | 114 | setuptools.bootstrap_install_from = egg | ||
4098 | 115 | try: | ||
4099 | 116 | import pkg_resources | ||
4100 | 117 | except ImportError: | ||
4101 | 118 | return do_download() | ||
4102 | 119 | try: | ||
4103 | 120 | pkg_resources.require("setuptools>=" + version) | ||
4104 | 121 | return | ||
4105 | 122 | except pkg_resources.VersionConflict, e: | ||
4106 | 123 | if was_imported: | ||
4107 | 124 | print >>sys.stderr, ( | ||
4108 | 125 | "The required version of setuptools (>=%s) is not available, and\n" | ||
4109 | 126 | "can't be installed while this script is running. Please install\n" | ||
4110 | 127 | " a more recent version first, using 'easy_install -U setuptools'." | ||
4111 | 128 | "\n\n(Currently using %r)" | ||
4112 | 129 | ) % (version, e.args[0]) | ||
4113 | 130 | sys.exit(2) | ||
4114 | 131 | else: | ||
4115 | 132 | del pkg_resources, sys.modules['pkg_resources'] # reload ok | ||
4116 | 133 | return do_download() | ||
4117 | 134 | except pkg_resources.DistributionNotFound: | ||
4118 | 135 | return do_download() | ||
4119 | 136 | |||
4120 | 137 | |||
4121 | 138 | def download_setuptools( | ||
4122 | 139 | version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, | ||
4123 | 140 | delay=15 | ||
4124 | 141 | ): | ||
4125 | 142 | """Download setuptools from a specified location and return its filename | ||
4126 | 143 | |||
4127 | 144 | `version` should be a valid setuptools version number that is available | ||
4128 | 145 | as an egg for download under the `download_base` URL (which should end | ||
4129 | 146 | with a '/'). `to_dir` is the directory where the egg will be downloaded. | ||
4130 | 147 | `delay` is the number of seconds to pause before an actual download | ||
4131 | 148 | attempt. | ||
4132 | 149 | """ | ||
4133 | 150 | import urllib2 | ||
4134 | 151 | import shutil | ||
4135 | 152 | egg_name = "setuptools-%s-py%s.egg" % (version, sys.version[:3]) | ||
4136 | 153 | url = download_base + egg_name | ||
4137 | 154 | saveto = os.path.join(to_dir, egg_name) | ||
4138 | 155 | src = dst = None | ||
4139 | 156 | if not os.path.exists(saveto): # Avoid repeated downloads | ||
4140 | 157 | try: | ||
4141 | 158 | from distutils import log | ||
4142 | 159 | if delay: | ||
4143 | 160 | log.warn(""" | ||
4144 | 161 | --------------------------------------------------------------------------- | ||
4145 | 162 | This script requires setuptools version %s to run (even to display | ||
4146 | 163 | help). I will attempt to download it for you (from | ||
4147 | 164 | %s), but | ||
4148 | 165 | you may need to enable firewall access for this script first. | ||
4149 | 166 | I will start the download in %d seconds. | ||
4150 | 167 | |||
4151 | 168 | (Note: if this machine does not have network access, please obtain the file | ||
4152 | 169 | |||
4153 | 170 | %s | ||
4154 | 171 | |||
4155 | 172 | and place it in this directory before rerunning this script.) | ||
4156 | 173 | ---------------------------------------------------------------------------""", | ||
4157 | 174 | version, download_base, delay, url | ||
4158 | 175 | ) | ||
4159 | 176 | from time import sleep | ||
4160 | 177 | sleep(delay) | ||
4161 | 178 | log.warn("Downloading %s", url) | ||
4162 | 179 | src = urllib2.urlopen(url) | ||
4163 | 180 | # Read/write all in one block, so we don't create a corrupt file | ||
4164 | 181 | # if the download is interrupted. | ||
4165 | 182 | data = _validate_md5(egg_name, src.read()) | ||
4166 | 183 | dst = open(saveto, "wb") | ||
4167 | 184 | dst.write(data) | ||
4168 | 185 | finally: | ||
4169 | 186 | if src: | ||
4170 | 187 | src.close() | ||
4171 | 188 | if dst: | ||
4172 | 189 | dst.close() | ||
4173 | 190 | return os.path.realpath(saveto) | ||
4174 | 191 | |||
4175 | 192 | |||
4176 | 193 | def main(argv, version=DEFAULT_VERSION): | ||
4177 | 194 | """Install or upgrade setuptools and EasyInstall""" | ||
4178 | 195 | try: | ||
4179 | 196 | import setuptools | ||
4180 | 197 | except ImportError: | ||
4181 | 198 | egg = None | ||
4182 | 199 | try: | ||
4183 | 200 | egg = download_setuptools(version, delay=0) | ||
4184 | 201 | sys.path.insert(0, egg) | ||
4185 | 202 | from setuptools.command.easy_install import main | ||
4186 | 203 | return main(list(argv) + [egg]) # we're done here | ||
4187 | 204 | finally: | ||
4188 | 205 | if egg and os.path.exists(egg): | ||
4189 | 206 | os.unlink(egg) | ||
4190 | 207 | else: | ||
4191 | 208 | if setuptools.__version__ == '0.0.1': | ||
4192 | 209 | print >>sys.stderr, ( | ||
4193 | 210 | "You have an obsolete version of setuptools installed. Please\n" | ||
4194 | 211 | "remove it from your system entirely before rerunning this script." | ||
4195 | 212 | ) | ||
4196 | 213 | sys.exit(2) | ||
4197 | 214 | |||
4198 | 215 | req = "setuptools>=" + version | ||
4199 | 216 | import pkg_resources | ||
4200 | 217 | try: | ||
4201 | 218 | pkg_resources.require(req) | ||
4202 | 219 | except pkg_resources.VersionConflict: | ||
4203 | 220 | try: | ||
4204 | 221 | from setuptools.command.easy_install import main | ||
4205 | 222 | except ImportError: | ||
4206 | 223 | from easy_install import main | ||
4207 | 224 | main(list(argv) + [download_setuptools(delay=0)]) | ||
4208 | 225 | sys.exit(0) # try to force an exit | ||
4209 | 226 | else: | ||
4210 | 227 | if argv: | ||
4211 | 228 | from setuptools.command.easy_install import main | ||
4212 | 229 | main(argv) | ||
4213 | 230 | else: | ||
4214 | 231 | print "Setuptools version", version, "or greater has been " \ | ||
4215 | 232 | + "installed." | ||
4216 | 233 | print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' | ||
4217 | 234 | |||
4218 | 235 | |||
4219 | 236 | def update_md5(filenames): | ||
4220 | 237 | """Update our built-in md5 registry""" | ||
4221 | 238 | |||
4222 | 239 | import re | ||
4223 | 240 | |||
4224 | 241 | for name in filenames: | ||
4225 | 242 | base = os.path.basename(name) | ||
4226 | 243 | f = open(name, 'rb') | ||
4227 | 244 | md5_data[base] = md5(f.read()).hexdigest() | ||
4228 | 245 | f.close() | ||
4229 | 246 | |||
4230 | 247 | data = [" %r: %r,\n" % it for it in md5_data.items()] | ||
4231 | 248 | data.sort() | ||
4232 | 249 | repl = "".join(data) | ||
4233 | 250 | |||
4234 | 251 | import inspect | ||
4235 | 252 | srcfile = inspect.getsourcefile(sys.modules[__name__]) | ||
4236 | 253 | f = open(srcfile, 'rb') | ||
4237 | 254 | src = f.read() | ||
4238 | 255 | f.close() | ||
4239 | 256 | |||
4240 | 257 | match = re.search("\nmd5_data = {\n([^}]+)}", src) | ||
4241 | 258 | if not match: | ||
4242 | 259 | print >>sys.stderr, "Internal error!" | ||
4243 | 260 | sys.exit(2) | ||
4244 | 261 | |||
4245 | 262 | src = src[:match.start(1)] + repl + src[match.end(1):] | ||
4246 | 263 | f = open(srcfile, 'w') | ||
4247 | 264 | f.write(src) | ||
4248 | 265 | f.close() | ||
4249 | 266 | |||
4250 | 267 | |||
4251 | 268 | if __name__ == '__main__': | ||
4252 | 269 | if len(sys.argv) > 2 and sys.argv[1] == '--md5update': | ||
4253 | 270 | update_md5(sys.argv[2:]) | ||
4254 | 271 | else: | ||
4255 | 272 | main(sys.argv[1:]) | ||
4256 | 273 | 0 | ||
4257 | === modified file 'helpers/python/charmhelpers/tests/test_charmhelpers.py' | |||
4258 | --- helpers/python/charmhelpers/tests/test_charmhelpers.py 2013-08-21 04:04:39 +0000 | |||
4259 | +++ helpers/python/charmhelpers/tests/test_charmhelpers.py 2015-08-31 19:32:56 +0000 | |||
4260 | @@ -3,7 +3,7 @@ | |||
4261 | 3 | import unittest | 3 | import unittest |
4262 | 4 | import yaml | 4 | import yaml |
4263 | 5 | 5 | ||
4265 | 6 | from simplejson import dumps | 6 | from json import dumps |
4266 | 7 | from StringIO import StringIO | 7 | from StringIO import StringIO |
4267 | 8 | from testtools import TestCase | 8 | from testtools import TestCase |
4268 | 9 | 9 | ||
4269 | 10 | 10 | ||
4270 | === modified file 'requirements.txt' | |||
4271 | --- requirements.txt 2015-08-24 16:35:22 +0000 | |||
4272 | +++ requirements.txt 2015-08-31 19:32:56 +0000 | |||
4273 | @@ -1,17 +1,22 @@ | |||
4274 | 1 | PyYAML==3.11 | 1 | PyYAML==3.11 |
4275 | 2 | blessings==1.6 | ||
4276 | 3 | bundletester==0.5.2 | ||
4277 | 4 | bzr>=2.6.0 | ||
4278 | 5 | charmworldlib>=0.4.2 | ||
4279 | 6 | coverage==3.7.1 | ||
4280 | 7 | flake8==1.6.2 | ||
4281 | 8 | httplib2==0.7.7 | ||
4282 | 9 | juju-deployer==0.4.3 | ||
4283 | 10 | jujubundlelib>=0.1.9 | ||
4284 | 11 | jujuclient==0.50.1 | ||
4285 | 2 | launchpadlib==1.10.2 | 12 | launchpadlib==1.10.2 |
4286 | 13 | mock==1.0.1 | ||
4287 | 3 | nose==1.2.1 | 14 | nose==1.2.1 |
4288 | 4 | requests==1.1.0 | ||
4289 | 5 | lazr.authentication==0.1.2 | ||
4290 | 6 | lazr.restfulclient==0.13.1 | ||
4291 | 7 | lazr.uri==1.0.3 | ||
4292 | 8 | simplejson==2.2.1 | ||
4293 | 9 | wadllib==1.3.1 | ||
4294 | 10 | httplib2==0.7.7 | ||
4295 | 11 | oauth==1.0.1 | 15 | oauth==1.0.1 |
4302 | 12 | flake8==1.6.2 | 16 | otherstuf==1.1.0 |
4303 | 13 | mock==1.0.1 | 17 | path.py==7.4 |
4304 | 14 | coverage==3.7.1 | 18 | pathspec==0.3.3 |
4305 | 15 | charmworldlib>=0.3.0 | 19 | pip>=7.1.2 |
4306 | 16 | bzr>=2.6.0 | 20 | requests==2.7.0 |
4307 | 17 | jujubundlelib>=0.1.9 | 21 | responses==0.4.0 |
4308 | 22 | ruamel.yaml==0.10.2 | ||
4309 | 18 | 23 | ||
4310 | === added directory 'scripts' | |||
4311 | === removed directory 'scripts' | |||
4312 | === added file 'scripts/packages.sh' | |||
4313 | --- scripts/packages.sh 1970-01-01 00:00:00 +0000 | |||
4314 | +++ scripts/packages.sh 2015-08-31 19:32:56 +0000 | |||
4315 | @@ -0,0 +1,19 @@ | |||
4316 | 1 | #!/bin/bash | ||
4317 | 2 | |||
4318 | 3 | function apt_install() { | ||
4319 | 4 | packages=$@ | ||
4320 | 5 | missing=() | ||
4321 | 6 | for p in $packages; do | ||
4322 | 7 | if ! dpkg-query -s $p &> /dev/null; then | ||
4323 | 8 | missing+=($p) | ||
4324 | 9 | fi | ||
4325 | 10 | done | ||
4326 | 11 | if [ -n "${missing}" ]; then | ||
4327 | 12 | sudo apt-get update | ||
4328 | 13 | sudo apt-get install -y ${missing} | ||
4329 | 14 | return 1 | ||
4330 | 15 | fi | ||
4331 | 16 | return 0 | ||
4332 | 17 | } | ||
4333 | 18 | apt_install $@ | ||
4334 | 19 | exit $? | ||
4335 | 0 | 20 | ||
4336 | === removed file 'scripts/test' | |||
4337 | --- scripts/test 2014-05-27 21:23:41 +0000 | |||
4338 | +++ scripts/test 1970-01-01 00:00:00 +0000 | |||
4339 | @@ -1,7 +0,0 @@ | |||
4340 | 1 | #!/bin/bash | ||
4341 | 2 | # If the --pdb switch is passed, inject --pdb-failures too. | ||
4342 | 3 | if [[ $* =~ --pdb( .*|$) ]] | ||
4343 | 4 | then | ||
4344 | 5 | extra_args="--pdb-failures" | ||
4345 | 6 | fi | ||
4346 | 7 | bin/nosetests --exe --with-id $extra_args $@ | ||
4347 | 8 | 0 | ||
4348 | === added file 'setup.cfg' | |||
4349 | --- setup.cfg 1970-01-01 00:00:00 +0000 | |||
4350 | +++ setup.cfg 2015-08-31 19:32:56 +0000 | |||
4351 | @@ -0,0 +1,8 @@ | |||
4352 | 1 | [nosetests] | ||
4353 | 2 | verbosity=1 | ||
4354 | 3 | detailed-errors=1 | ||
4355 | 4 | #pdb=1 | ||
4356 | 5 | #pdb-failures=1 | ||
4357 | 6 | logging-level=INFO | ||
4358 | 7 | |||
4359 | 8 | |||
4360 | 0 | 9 | ||
4361 | === modified file 'setup.py' | |||
4362 | --- setup.py 2015-08-24 16:35:22 +0000 | |||
4363 | +++ setup.py 2015-08-31 19:32:56 +0000 | |||
4364 | @@ -3,11 +3,6 @@ | |||
4365 | 3 | # Copyright 2012 Canonical Ltd. This software is licensed under the | 3 | # Copyright 2012 Canonical Ltd. This software is licensed under the |
4366 | 4 | # GNU General Public License version 3 (see the file LICENSE). | 4 | # GNU General Public License version 3 (see the file LICENSE). |
4367 | 5 | 5 | ||
4368 | 6 | import ez_setup | ||
4369 | 7 | |||
4370 | 8 | |||
4371 | 9 | ez_setup.use_setuptools() | ||
4372 | 10 | |||
4373 | 11 | from setuptools import setup, find_packages | 6 | from setuptools import setup, find_packages |
4374 | 12 | 7 | ||
4375 | 13 | 8 | ||
4376 | @@ -18,7 +13,9 @@ | |||
4377 | 18 | exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), | 13 | exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), |
4378 | 19 | install_requires=['launchpadlib', 'argparse', 'cheetah', 'pyyaml', | 14 | install_requires=['launchpadlib', 'argparse', 'cheetah', 'pyyaml', |
4379 | 20 | 'pycrypto', 'paramiko', 'bzr', 'requests', | 15 | 'pycrypto', 'paramiko', 'bzr', 'requests', |
4381 | 21 | 'charmworldlib', 'jujubundlelib'], | 16 | 'charmworldlib', 'blessings', 'ruamel.yaml', |
4382 | 17 | 'pathspec', 'bundletester', 'otherstuf', "path.py", | ||
4383 | 18 | "jujubundlelib"], | ||
4384 | 22 | include_package_data=True, | 19 | include_package_data=True, |
4385 | 23 | maintainer='Marco Ceppi', | 20 | maintainer='Marco Ceppi', |
4386 | 24 | maintainer_email='marco@ceppi.net', | 21 | maintainer_email='marco@ceppi.net', |
4387 | @@ -33,27 +30,30 @@ | |||
4388 | 33 | entry_points={ | 30 | entry_points={ |
4389 | 34 | 'console_scripts': [ | 31 | 'console_scripts': [ |
4390 | 35 | 'charm = charmtools:charm', | 32 | 'charm = charmtools:charm', |
4394 | 36 | 'juju-charm = charmtools:charm', | 33 | 'charm-add = charmtools.generate:main', |
4395 | 37 | 'juju-bundle = charmtools:bundle', | 34 | 'charm-compose = charmtools.compose:main', |
4396 | 38 | 'juju-test = charmtools.test:main', | 35 | 'charm-create = charmtools.create:main', |
4397 | 36 | 'charm-generate = charmtools.generate:main', | ||
4398 | 39 | 'charm-get = charmtools.get:main', | 37 | 'charm-get = charmtools.get:main', |
4399 | 40 | 'charm-getall = charmtools.getall:main', | 38 | 'charm-getall = charmtools.getall:main', |
4402 | 41 | 'charm-proof = charmtools.proof:main', | 39 | 'charm-help = charmtools.cli:usage', |
4403 | 42 | 'charm-create = charmtools.create:main', | 40 | 'charm-info = charmtools.info:main', |
4404 | 41 | 'charm-inspect = charmtools.compose:inspect', | ||
4405 | 43 | 'charm-list = charmtools.list:main', | 42 | 'charm-list = charmtools.list:main', |
4406 | 44 | 'charm-promulgate = charmtools.promulgate:main', | 43 | 'charm-promulgate = charmtools.promulgate:main', |
4407 | 44 | 'charm-proof = charmtools.proof:main', | ||
4408 | 45 | 'charm-refresh = charmtools.compose:main', | ||
4409 | 45 | 'charm-review = charmtools.review:main', | 46 | 'charm-review = charmtools.review:main', |
4410 | 46 | 'charm-review-queue = charmtools.review_queue:main', | 47 | 'charm-review-queue = charmtools.review_queue:main', |
4411 | 47 | 'charm-search = charmtools.search:main', | 48 | 'charm-search = charmtools.search:main', |
4412 | 48 | 'charm-subscribers = charmtools.subscribers:main', | 49 | 'charm-subscribers = charmtools.subscribers:main', |
4413 | 50 | 'charm-test = charmtools.test:main', | ||
4414 | 49 | 'charm-unpromulgate = charmtools.unpromulgate:main', | 51 | 'charm-unpromulgate = charmtools.unpromulgate:main', |
4415 | 50 | 'charm-update = charmtools.update:main', | 52 | 'charm-update = charmtools.update:main', |
4416 | 51 | 'charm-version = charmtools.version:main', | 53 | 'charm-version = charmtools.version:main', |
4422 | 52 | 'charm-help = charmtools.cli:usage', | 54 | 'juju-bundle = charmtools:bundle', |
4423 | 53 | 'charm-test = charmtools.test:main', | 55 | 'juju-charm = charmtools:charm', |
4424 | 54 | 'charm-info = charmtools.info:main', | 56 | 'juju-test = charmtools.test:main', |
4420 | 55 | 'charm-generate = charmtools.generate:main', | ||
4421 | 56 | 'charm-add = charmtools.generate:main', | ||
4425 | 57 | ], | 57 | ], |
4426 | 58 | 'charmtools.templates': [ | 58 | 'charmtools.templates': [ |
4427 | 59 | 'bash = charmtools.templates.bash:BashCharmTemplate', | 59 | 'bash = charmtools.templates.bash:BashCharmTemplate', |
4428 | 60 | 60 | ||
4429 | === added directory 'tests/interfaces' | |||
4430 | === added directory 'tests/interfaces/mysql' | |||
4431 | === added file 'tests/interfaces/mysql/interface.yaml' | |||
4432 | --- tests/interfaces/mysql/interface.yaml 1970-01-01 00:00:00 +0000 | |||
4433 | +++ tests/interfaces/mysql/interface.yaml 2015-08-31 19:32:56 +0000 | |||
4434 | @@ -0,0 +1,1 @@ | |||
4435 | 1 | name: mysql | ||
4436 | 0 | 2 | ||
4437 | === added file 'tests/interfaces/mysql/provides.py' | |||
4438 | --- tests/interfaces/mysql/provides.py 1970-01-01 00:00:00 +0000 | |||
4439 | +++ tests/interfaces/mysql/provides.py 2015-08-31 19:32:56 +0000 | |||
4440 | @@ -0,0 +1,1 @@ | |||
4441 | 1 | "provides" | ||
4442 | 0 | 2 | ||
4443 | === added file 'tests/interfaces/mysql/requires.py' | |||
4444 | --- tests/interfaces/mysql/requires.py 1970-01-01 00:00:00 +0000 | |||
4445 | +++ tests/interfaces/mysql/requires.py 2015-08-31 19:32:56 +0000 | |||
4446 | @@ -0,0 +1,1 @@ | |||
4447 | 1 | "requires" | ||
4448 | 0 | 2 | ||
4449 | === modified file 'tests/test_charm_generate.py' | |||
4450 | --- tests/test_charm_generate.py 2014-11-05 22:09:11 +0000 | |||
4451 | +++ tests/test_charm_generate.py 2015-08-31 19:32:56 +0000 | |||
4452 | @@ -24,13 +24,6 @@ | |||
4453 | 24 | 'provides': 'nointerface'}) | 24 | 'provides': 'nointerface'}) |
4454 | 25 | 25 | ||
4455 | 26 | @patch('charmtools.generate.Charm') | 26 | @patch('charmtools.generate.Charm') |
4456 | 27 | @patch('charmtools.generate.shutil') | ||
4457 | 28 | def test_copy_file(self, msh, mcharm): | ||
4458 | 29 | m = mcharm.return_value.is_charm.return_value = True | ||
4459 | 30 | copy_file('1.ex', '/tmp') | ||
4460 | 31 | msh.copy.assert_called() | ||
4461 | 32 | |||
4462 | 33 | @patch('charmtools.generate.Charm') | ||
4463 | 34 | def test_not_charm(self, mcharm): | 27 | def test_not_charm(self, mcharm): |
4464 | 35 | mcharm.return_value.is_charm.return_value = False | 28 | mcharm.return_value.is_charm.return_value = False |
4465 | 36 | self.assertRaises(Exception, copy_file, '1.ex', '/no-charm') | 29 | self.assertRaises(Exception, copy_file, '1.ex', '/no-charm') |
4466 | 37 | 30 | ||
4467 | === added file 'tests/test_compose.py' | |||
4468 | --- tests/test_compose.py 1970-01-01 00:00:00 +0000 | |||
4469 | +++ tests/test_compose.py 2015-08-31 19:32:56 +0000 | |||
4470 | @@ -0,0 +1,239 @@ | |||
4471 | 1 | from charmtools import compose | ||
4472 | 2 | from charmtools import utils | ||
4473 | 3 | from path import path | ||
4474 | 4 | from ruamel import yaml | ||
4475 | 5 | import json | ||
4476 | 6 | import logging | ||
4477 | 7 | import mock | ||
4478 | 8 | import os | ||
4479 | 9 | import pkg_resources | ||
4480 | 10 | import responses | ||
4481 | 11 | import unittest | ||
4482 | 12 | |||
4483 | 13 | |||
4484 | 14 | class TestCompose(unittest.TestCase): | ||
4485 | 15 | def setUp(self): | ||
4486 | 16 | dirname = pkg_resources.resource_filename(__name__, "") | ||
4487 | 17 | os.environ["COMPOSER_PATH"] = path(dirname) | ||
4488 | 18 | os.environ["INTERFACE_PATH"] = path(dirname) / "interfaces" | ||
4489 | 19 | path("out").rmtree_p() | ||
4490 | 20 | |||
4491 | 21 | def tearDown(self): | ||
4492 | 22 | path("out").rmtree_p() | ||
4493 | 23 | |||
4494 | 24 | def test_tester_compose(self): | ||
4495 | 25 | composer = compose.Composer() | ||
4496 | 26 | composer.log_level = "WARNING" | ||
4497 | 27 | composer.output_dir = "out" | ||
4498 | 28 | composer.series = "trusty" | ||
4499 | 29 | composer.name = "foo" | ||
4500 | 30 | composer.charm = "trusty/tester" | ||
4501 | 31 | composer() | ||
4502 | 32 | base = path('out/trusty/foo') | ||
4503 | 33 | self.assertTrue(base.exists()) | ||
4504 | 34 | |||
4505 | 35 | # Verify ignore rules applied | ||
4506 | 36 | self.assertFalse((base / ".bzr").exists()) | ||
4507 | 37 | |||
4508 | 38 | # Metadata should have combined provides fields | ||
4509 | 39 | metadata = base / "metadata.yaml" | ||
4510 | 40 | self.assertTrue(metadata.exists()) | ||
4511 | 41 | metadata_data = yaml.load(metadata.open()) | ||
4512 | 42 | self.assertIn("shared-db", metadata_data['provides']) | ||
4513 | 43 | self.assertIn("storage", metadata_data['provides']) | ||
4514 | 44 | |||
4515 | 45 | # Config should have keys but not the ones in deletes | ||
4516 | 46 | config = base / "config.yaml" | ||
4517 | 47 | self.assertTrue(config.exists()) | ||
4518 | 48 | config_data = yaml.load(config.open())['options'] | ||
4519 | 49 | self.assertIn("bind-address", config_data) | ||
4520 | 50 | self.assertNotIn("vip", config_data) | ||
4521 | 51 | |||
4522 | 52 | cyaml = base / "composer.yaml" | ||
4523 | 53 | self.assertTrue(cyaml.exists()) | ||
4524 | 54 | cyaml_data = yaml.load(cyaml.open()) | ||
4525 | 55 | self.assertEquals(cyaml_data['includes'], ['trusty/mysql']) | ||
4526 | 56 | self.assertEquals(cyaml_data['is'], 'foo') | ||
4527 | 57 | |||
4528 | 58 | self.assertTrue((base / "hooks/config-changed").exists()) | ||
4529 | 59 | |||
4530 | 60 | # Files from the top layer as overrides | ||
4531 | 61 | start = base / "hooks/start" | ||
4532 | 62 | self.assertTrue(start.exists()) | ||
4533 | 63 | self.assertIn("Overridden", start.text()) | ||
4534 | 64 | |||
4535 | 65 | self.assertTrue((base / "README.md").exists()) | ||
4536 | 66 | self.assertEqual("dynamic tactics", (base / "README.md").text()) | ||
4537 | 67 | |||
4538 | 68 | sigs = base / ".composer.manifest" | ||
4539 | 69 | self.assertTrue(sigs.exists()) | ||
4540 | 70 | data = json.load(sigs.open()) | ||
4541 | 71 | self.assertEquals(data['signatures']["README.md"], [ | ||
4542 | 72 | u'foo', | ||
4543 | 73 | "static", | ||
4544 | 74 | u'cfac20374288c097975e9f25a0d7c81783acdbc81' | ||
4545 | 75 | '24302ff4a731a4aea10de99']) | ||
4546 | 76 | |||
4547 | 77 | self.assertEquals(data["signatures"]['metadata.yaml'], [ | ||
4548 | 78 | u'foo', | ||
4549 | 79 | "dynamic", | ||
4550 | 80 | u'8dd9059eae849c61a1bd3d8de7f96a418e' | ||
4551 | 81 | u'f8b4bf5d9c058c413b5169e2783815', | ||
4552 | 82 | ]) | ||
4553 | 83 | |||
4554 | 84 | def test_regenerate_inplace(self): | ||
4555 | 85 | # take a generated example where a base layer has changed | ||
4556 | 86 | # regenerate in place | ||
4557 | 87 | # make some assertions | ||
4558 | 88 | composer = compose.Composer() | ||
4559 | 89 | composer.log_level = "WARNING" | ||
4560 | 90 | composer.output_dir = "out" | ||
4561 | 91 | composer.series = "trusty" | ||
4562 | 92 | composer.name = "foo" | ||
4563 | 93 | composer.charm = "trusty/b" | ||
4564 | 94 | composer() | ||
4565 | 95 | base = path('out/trusty/foo') | ||
4566 | 96 | self.assertTrue(base.exists()) | ||
4567 | 97 | |||
4568 | 98 | # verify the 1st gen worked | ||
4569 | 99 | self.assertTrue((base / "a").exists()) | ||
4570 | 100 | self.assertTrue((base / "README.md").exists()) | ||
4571 | 101 | |||
4572 | 102 | # now regenerate from the target | ||
4573 | 103 | with utils.cd("out/trusty/foo"): | ||
4574 | 104 | composer = compose.Composer() | ||
4575 | 105 | composer.log_level = "WARNING" | ||
4576 | 106 | composer.output_dir = path(os.getcwd()) | ||
4577 | 107 | composer.series = "trusty" | ||
4578 | 108 | # The generate target and source are now the same | ||
4579 | 109 | composer.name = "foo" | ||
4580 | 110 | composer.charm = "." | ||
4581 | 111 | composer() | ||
4582 | 112 | base = composer.output_dir | ||
4583 | 113 | self.assertTrue(base.exists()) | ||
4584 | 114 | |||
4585 | 115 | # Check that the generated composer makes sense | ||
4586 | 116 | cy = base / "composer.yaml" | ||
4587 | 117 | config = yaml.load(cy.open()) | ||
4588 | 118 | self.assertEquals(config["includes"], ["trusty/a", "interface:mysql"]) | ||
4589 | 119 | self.assertEquals(config["is"], "foo") | ||
4590 | 120 | |||
4591 | 121 | # We can even run it more than once | ||
4592 | 122 | composer() | ||
4593 | 123 | cy = base / "composer.yaml" | ||
4594 | 124 | config = yaml.load(cy.open()) | ||
4595 | 125 | self.assertEquals(config["includes"], ["trusty/a", "interface:mysql"]) | ||
4596 | 126 | self.assertEquals(config["is"], "foo") | ||
4597 | 127 | |||
4598 | 128 | # We included an interface, we should be able to assert things about it | ||
4599 | 129 | # in its final form as well | ||
4600 | 130 | provides = base / "hooks/relations/mysql/provides.py" | ||
4601 | 131 | requires = base / "hooks/relations/mysql/requires.py" | ||
4602 | 132 | self.assertTrue(provides.exists()) | ||
4603 | 133 | self.assertTrue(requires.exists()) | ||
4604 | 134 | |||
4605 | 135 | # and that we generated the hooks themselves | ||
4606 | 136 | for kind in ["joined", "changed", "broken", "departed"]: | ||
4607 | 137 | self.assertTrue((base / "hooks" / | ||
4608 | 138 | "mysql-relation-{}".format(kind)).exists()) | ||
4609 | 139 | |||
4610 | 140 | # and ensure we have an init file (the interface doesn't include one; it's added) | ||
4611 | 141 | init = base / "hooks/relations/mysql/__init__.py" | ||
4612 | 142 | self.assertTrue(init.exists()) | ||
4613 | 143 | |||
4614 | 144 | @responses.activate | ||
4615 | 145 | def test_remote_interface(self): | ||
4616 | 146 | # XXX: this test does pull the git repo in the response | ||
4617 | 147 | responses.add(responses.GET, | ||
4618 | 148 | "http://interfaces.juju.solutions/api/v1/interface/pgsql/", | ||
4619 | 149 | body='''{ | ||
4620 | 150 | "id": "pgsql", | ||
4621 | 151 | "name": "pgsql4", | ||
4622 | 152 | "repo": | ||
4623 | 153 | "https://github.com/bcsaller/juju-relation-pgsql.git", | ||
4624 | 154 | "_id": { | ||
4625 | 155 | "$oid": "55a471959c1d246feae487e5" | ||
4626 | 156 | }, | ||
4627 | 157 | "version": 1 | ||
4628 | 158 | }''', | ||
4629 | 159 | content_type="application/json") | ||
4630 | 160 | composer = compose.Composer() | ||
4631 | 161 | composer.log_level = "WARNING" | ||
4632 | 162 | composer.output_dir = "out" | ||
4633 | 163 | composer.series = "trusty" | ||
4634 | 164 | composer.name = "foo" | ||
4635 | 165 | composer.charm = "trusty/c-reactive" | ||
4636 | 166 | composer() | ||
4637 | 167 | base = path('out/trusty/foo') | ||
4638 | 168 | self.assertTrue(base.exists()) | ||
4639 | 169 | |||
4640 | 170 | # basics | ||
4641 | 171 | self.assertTrue((base / "a").exists()) | ||
4642 | 172 | self.assertTrue((base / "README.md").exists()) | ||
4643 | 173 | # show that we pulled the interface from github | ||
4644 | 174 | init = base / "hooks/relations/pgsql/__init__.py" | ||
4645 | 175 | self.assertTrue(init.exists()) | ||
4646 | 176 | main = base / "hooks/reactive/main.py" | ||
4647 | 177 | self.assertTrue(main.exists()) | ||
4648 | 178 | |||
4649 | 179 | @mock.patch("charmtools.utils.Process") | ||
4650 | 180 | @responses.activate | ||
4651 | 181 | def test_remote_layer(self, mcall): | ||
4652 | 182 | # XXX: this test does pull the git repo in the response | ||
4653 | 183 | responses.add(responses.GET, | ||
4654 | 184 | "http://interfaces.juju.solutions/api/v1/layer/basic/", | ||
4655 | 185 | body='''{ | ||
4656 | 186 | "id": "basic", | ||
4657 | 187 | "name": "basic", | ||
4658 | 188 | "repo": | ||
4659 | 189 | "https://git.launchpad.net/~bcsaller/charms/+source/basic", | ||
4660 | 190 | "_id": { | ||
4661 | 191 | "$oid": "55a471959c1d246feae487e5" | ||
4662 | 192 | }, | ||
4663 | 193 | "version": 1 | ||
4664 | 194 | }''', | ||
4665 | 195 | content_type="application/json") | ||
4666 | 196 | composer = compose.Composer() | ||
4667 | 197 | composer.log_level = "WARNING" | ||
4668 | 198 | composer.output_dir = "out" | ||
4669 | 199 | composer.series = "trusty" | ||
4670 | 200 | composer.name = "foo" | ||
4671 | 201 | composer.charm = "trusty/use-layers" | ||
4672 | 202 | # remove the sign phase | ||
4673 | 203 | composer.PHASES = composer.PHASES[:-2] | ||
4674 | 204 | |||
4675 | 205 | composer() | ||
4676 | 206 | base = path('out/trusty/foo') | ||
4677 | 207 | self.assertTrue(base.exists()) | ||
4678 | 208 | |||
4679 | 209 | # basics | ||
4680 | 210 | self.assertTrue((base / "README.md").exists()) | ||
4681 | 211 | |||
4682 | 212 | # show that we pulled charmhelpers from the basic layer as well | ||
4683 | 213 | mcall.assert_called_with(("pip", "install", "-U", | ||
4684 | 214 | '--exists-action', 'i', | ||
4685 | 215 | "-t", mock.ANY, | ||
4686 | 216 | mock.ANY)) | ||
4687 | 217 | |||
4688 | 218 | |||
4689 | 219 | @mock.patch("charmtools.utils.Process") | ||
4690 | 220 | def test_pypi_installer(self, mcall): | ||
4691 | 221 | composer = compose.Composer() | ||
4692 | 222 | composer.log_level = "WARN" | ||
4693 | 223 | composer.output_dir = "out" | ||
4694 | 224 | composer.series = "trusty" | ||
4695 | 225 | composer.name = "foo" | ||
4696 | 226 | composer.charm = "trusty/chlayer" | ||
4697 | 227 | |||
4698 | 228 | # remove the sign phase | ||
4699 | 229 | composer.PHASES = composer.PHASES[:-2] | ||
4700 | 230 | composer() | ||
4701 | 231 | mcall.assert_called_with(("pip", "install", "-U", | ||
4702 | 232 | '--exists-action', 'i', | ||
4703 | 233 | "-t", mock.ANY, | ||
4704 | 234 | "charmhelpers")) | ||
4705 | 235 | |||
4706 | 236 | |||
4707 | 237 | if __name__ == '__main__': | ||
4708 | 238 | logging.basicConfig() | ||
4709 | 239 | unittest.main() | ||
4710 | 0 | 240 | ||
4711 | === added file 'tests/test_config.py' | |||
4712 | --- tests/test_config.py 1970-01-01 00:00:00 +0000 | |||
4713 | +++ tests/test_config.py 2015-08-31 19:32:56 +0000 | |||
4714 | @@ -0,0 +1,29 @@ | |||
4715 | 1 | import logging | ||
4716 | 2 | import unittest | ||
4717 | 3 | |||
4718 | 4 | from charmtools.compose.config import ComposerConfig | ||
4719 | 5 | |||
4720 | 6 | |||
4721 | 7 | class TestConfig(unittest.TestCase): | ||
4722 | 8 | def test_rget(self): | ||
4723 | 9 | c = ComposerConfig() | ||
4724 | 10 | c['a'] = 1 | ||
4725 | 11 | c = c.new_child() | ||
4726 | 12 | c['a'] = 99 | ||
4727 | 13 | c['b'] = "alpha" | ||
4728 | 14 | self.assertEqual(c.get('a'), 99) | ||
4729 | 15 | self.assertEqual(c.get('b'), "alpha") | ||
4730 | 16 | self.assertEqual(c.rget('a'), [99, 1]) | ||
4731 | 17 | |||
4732 | 18 | def test_tactics(self): | ||
4733 | 19 | # configure from empty and a layer with tactics | ||
4734 | 20 | c = ComposerConfig() | ||
4735 | 21 | c._tactics = ['a', 'b', 'c'] | ||
4736 | 22 | c = c.new_child() | ||
4737 | 23 | c._tactics = ['d', 'c'] | ||
4738 | 24 | self.assertEqual(c.tactics()[:5], ['d', 'c', 'a', 'b', 'c']) | ||
4739 | 25 | |||
4740 | 26 | |||
4741 | 27 | if __name__ == '__main__': | ||
4742 | 28 | logging.basicConfig() | ||
4743 | 29 | unittest.main() | ||
4744 | 0 | 30 | ||
4745 | === modified file 'tests/test_juju_test.py' | |||
4746 | --- tests/test_juju_test.py 2014-06-10 21:26:52 +0000 | |||
4747 | +++ tests/test_juju_test.py 2015-08-31 19:32:56 +0000 | |||
4748 | @@ -714,15 +714,6 @@ | |||
4749 | 714 | call('Failed to grab logs for dummy/0')] | 714 | call('Failed to grab logs for dummy/0')] |
4750 | 715 | o.log.warn.assert_has_calls(expected_warns) | 715 | o.log.warn.assert_has_calls(expected_warns) |
4751 | 716 | 716 | ||
4752 | 717 | @patch('subprocess.check_output') | ||
4753 | 718 | @patch.object(juju_test.Orchestra, 'print_status') | ||
4754 | 719 | def test_orchestra_perform(self, mprint_status, mcheck_output): | ||
4755 | 720 | args = Arguments(tests='dummy', juju_env='testing', timeout=1) | ||
4756 | 721 | c = juju_test.Conductor(args) | ||
4757 | 722 | o = juju_test.Orchestra(c, 'test/dummy') | ||
4758 | 723 | o.perform() | ||
4759 | 724 | mprint_status.assert_called_once() | ||
4760 | 725 | |||
4761 | 726 | 717 | ||
4762 | 727 | class TestCfgTest(unittest.TestCase): | 718 | class TestCfgTest(unittest.TestCase): |
4763 | 728 | test_config = '''\ | 719 | test_config = '''\ |
4764 | 729 | 720 | ||
4765 | === added file 'tests/test_utils.py' | |||
4766 | --- tests/test_utils.py 1970-01-01 00:00:00 +0000 | |||
4767 | +++ tests/test_utils.py 2015-08-31 19:32:56 +0000 | |||
4768 | @@ -0,0 +1,43 @@ | |||
4769 | 1 | from unittest import TestCase | ||
4770 | 2 | from charmtools import utils | ||
4771 | 3 | from StringIO import StringIO | ||
4772 | 4 | |||
4773 | 5 | |||
4774 | 6 | class TestUtils(TestCase): | ||
4775 | 7 | |||
4776 | 8 | def test_delta_python(self): | ||
4777 | 9 | a = StringIO(""" | ||
4778 | 10 | def foo(n): | ||
4779 | 11 | return n * 2 | ||
4780 | 12 | |||
4781 | 13 | |||
4782 | 14 | @when('db.ready') | ||
4783 | 15 | def react(db): | ||
4784 | 16 | print db | ||
4785 | 17 | """) | ||
4786 | 18 | |||
4787 | 19 | b = StringIO(""" | ||
4788 | 20 | def foo(n): | ||
4789 | 21 | return n * 2 | ||
4790 | 22 | |||
4791 | 23 | |||
4792 | 24 | @when('db.ready', 'bar') | ||
4793 | 25 | def react(db): | ||
4794 | 26 | print db | ||
4795 | 27 | """) | ||
4796 | 28 | |||
4797 | 29 | result = StringIO() | ||
4798 | 30 | t = utils.TermWriter(fp=result) | ||
4799 | 31 | rc = utils.delta_python_dump(a, b, utils.REACTIVE_PATTERNS, | ||
4800 | 32 | context=3, | ||
4801 | 33 | term=t, | ||
4802 | 34 | from_name="Alpha", | ||
4803 | 35 | to_name="Beta") | ||
4804 | 36 | # return code here indicates that there was a diff | ||
4805 | 37 | self.assertFalse(rc) | ||
4806 | 38 | result.seek(0) | ||
4807 | 39 | output = result.read() | ||
4808 | 40 | self.assertIn("Alpha", output) | ||
4809 | 41 | self.assertIn("Beta", output) | ||
4810 | 42 | self.assertIn("@when('db.ready'", output) | ||
4811 | 43 | self.assertIn("bar", output) | ||
4812 | 0 | 44 | ||
4813 | === added directory 'tests/trusty' | |||
4814 | === added directory 'tests/trusty/a' | |||
4815 | === added file 'tests/trusty/a/README.md' | |||
4816 | --- tests/trusty/a/README.md 1970-01-01 00:00:00 +0000 | |||
4817 | +++ tests/trusty/a/README.md 2015-08-31 19:32:56 +0000 | |||
4818 | @@ -0,0 +1,1 @@ | |||
4819 | 1 | From A | ||
4820 | 0 | 2 | ||
4821 | === added file 'tests/trusty/a/a' | |||
4822 | --- tests/trusty/a/a 1970-01-01 00:00:00 +0000 | |||
4823 | +++ tests/trusty/a/a 2015-08-31 19:32:56 +0000 | |||
4824 | @@ -0,0 +1,1 @@ | |||
4825 | 1 | from a | ||
4826 | 0 | 2 | ||
4827 | === added directory 'tests/trusty/b' | |||
4828 | === added file 'tests/trusty/b/README.md' | |||
4829 | --- tests/trusty/b/README.md 1970-01-01 00:00:00 +0000 | |||
4830 | +++ tests/trusty/b/README.md 2015-08-31 19:32:56 +0000 | |||
4831 | @@ -0,0 +1,1 @@ | |||
4832 | 1 | This is an overridden readme file | ||
4833 | 0 | 2 | ||
4834 | === added file 'tests/trusty/b/composer.yaml' | |||
4835 | --- tests/trusty/b/composer.yaml 1970-01-01 00:00:00 +0000 | |||
4836 | +++ tests/trusty/b/composer.yaml 2015-08-31 19:32:56 +0000 | |||
4837 | @@ -0,0 +1,1 @@ | |||
4838 | 1 | includes: ["trusty/a", "interface:mysql"] | ||
4839 | 0 | 2 | ||
4840 | === added file 'tests/trusty/b/metadata.yaml' | |||
4841 | --- tests/trusty/b/metadata.yaml 1970-01-01 00:00:00 +0000 | |||
4842 | +++ tests/trusty/b/metadata.yaml 2015-08-31 19:32:56 +0000 | |||
4843 | @@ -0,0 +1,11 @@ | |||
4844 | 1 | name: b | ||
4845 | 2 | summary: An imagined extension to the a charm | ||
4846 | 3 | maintainer: None | ||
4847 | 4 | description: | | ||
4848 | 5 | Test layer b | ||
4849 | 6 | categories: | ||
4850 | 7 | - app | ||
4851 | 8 | requires: | ||
4852 | 9 | mysql: | ||
4853 | 10 | interface: mysql | ||
4854 | 11 | |||
4855 | 0 | 12 | ||
4856 | === added directory 'tests/trusty/c' | |||
4857 | === added directory 'tests/trusty/c-reactive' | |||
4858 | === added file 'tests/trusty/c-reactive/README.md' | |||
4859 | --- tests/trusty/c-reactive/README.md 1970-01-01 00:00:00 +0000 | |||
4860 | +++ tests/trusty/c-reactive/README.md 2015-08-31 19:32:56 +0000 | |||
4861 | @@ -0,0 +1,1 @@ | |||
4862 | 1 | This is an overridden readme file | ||
4863 | 0 | 2 | ||
4864 | === added file 'tests/trusty/c-reactive/composer.yaml' | |||
4865 | --- tests/trusty/c-reactive/composer.yaml 1970-01-01 00:00:00 +0000 | |||
4866 | +++ tests/trusty/c-reactive/composer.yaml 2015-08-31 19:32:56 +0000 | |||
4867 | @@ -0,0 +1,1 @@ | |||
4868 | 1 | includes: ["trusty/c"] | ||
4869 | 0 | 2 | ||
4870 | === added directory 'tests/trusty/c-reactive/hooks' | |||
4871 | === added directory 'tests/trusty/c-reactive/hooks/reactive' | |||
4872 | === added file 'tests/trusty/c-reactive/hooks/reactive/main.py' | |||
4873 | --- tests/trusty/c-reactive/hooks/reactive/main.py 1970-01-01 00:00:00 +0000 | |||
4874 | +++ tests/trusty/c-reactive/hooks/reactive/main.py 2015-08-31 19:32:56 +0000 | |||
4875 | @@ -0,0 +1,6 @@ | |||
4876 | 1 | from charmhelpers.core.reactive import when | ||
4877 | 2 | from charmhelpers.core import hookenv | ||
4878 | 3 | |||
4879 | 4 | @when('db.database.available') | ||
4880 | 5 | def pretend_we_have_db(pgsql): | ||
4881 | 6 | hookenv.log("Got db: %s:%s" % (pgsql.host(), pgsql.database())) | ||
4882 | 0 | 7 | ||
4883 | === added file 'tests/trusty/c/README.md' | |||
4884 | --- tests/trusty/c/README.md 1970-01-01 00:00:00 +0000 | |||
4885 | +++ tests/trusty/c/README.md 2015-08-31 19:32:56 +0000 | |||
4886 | @@ -0,0 +1,1 @@ | |||
4887 | 1 | This is an overridden readme file | ||
4888 | 0 | 2 | ||
4889 | === added file 'tests/trusty/c/composer.yaml' | |||
4890 | --- tests/trusty/c/composer.yaml 1970-01-01 00:00:00 +0000 | |||
4891 | +++ tests/trusty/c/composer.yaml 2015-08-31 19:32:56 +0000 | |||
4892 | @@ -0,0 +1,1 @@ | |||
4893 | 1 | includes: ["trusty/a", "interface:pgsql"] | ||
4894 | 0 | 2 | ||
4895 | === added file 'tests/trusty/c/metadata.yaml' | |||
4896 | --- tests/trusty/c/metadata.yaml 1970-01-01 00:00:00 +0000 | |||
4897 | +++ tests/trusty/c/metadata.yaml 2015-08-31 19:32:56 +0000 | |||
4898 | @@ -0,0 +1,11 @@ | |||
4899 | 1 | name: c | ||
4900 | 2 | summary: An imagined extension to the a charm | ||
4901 | 3 | maintainer: None | ||
4902 | 4 | description: | | ||
4903 | 5 | Test layer c | ||
4904 | 6 | categories: | ||
4905 | 7 | - app | ||
4906 | 8 | requires: | ||
4907 | 9 | db: | ||
4908 | 10 | interface: pgsql | ||
4909 | 11 | |||
4910 | 0 | 12 | ||
4911 | === added directory 'tests/trusty/chlayer' | |||
4912 | === added directory 'tests/trusty/chlayer/hooks' | |||
4913 | === added file 'tests/trusty/chlayer/hooks/charmhelpers.pypi' | |||
4914 | --- tests/trusty/chlayer/hooks/charmhelpers.pypi 1970-01-01 00:00:00 +0000 | |||
4915 | +++ tests/trusty/chlayer/hooks/charmhelpers.pypi 2015-08-31 19:32:56 +0000 | |||
4916 | @@ -0,0 +1,1 @@ | |||
4917 | 1 | charmhelpers | ||
4918 | 0 | 2 | ||
4919 | === added directory 'tests/trusty/mysql' | |||
4920 | === added file 'tests/trusty/mysql/.bzrignore' | |||
4921 | --- tests/trusty/mysql/.bzrignore 1970-01-01 00:00:00 +0000 | |||
4922 | +++ tests/trusty/mysql/.bzrignore 2015-08-31 19:32:56 +0000 | |||
4923 | @@ -0,0 +1,2 @@ | |||
4924 | 1 | bin/ | ||
4925 | 2 | .venv | ||
4926 | 0 | 3 | ||
4927 | === added file 'tests/trusty/mysql/Makefile' | |||
4928 | --- tests/trusty/mysql/Makefile 1970-01-01 00:00:00 +0000 | |||
4929 | +++ tests/trusty/mysql/Makefile 2015-08-31 19:32:56 +0000 | |||
4930 | @@ -0,0 +1,24 @@ | |||
4931 | 1 | #!/usr/bin/make | ||
4932 | 2 | PYTHON := /usr/bin/env python | ||
4933 | 3 | export PYTHONPATH := hooks | ||
4934 | 4 | |||
4935 | 5 | virtualenv: | ||
4936 | 6 | virtualenv .venv | ||
4937 | 7 | .venv/bin/pip install flake8 nose mock six | ||
4938 | 8 | |||
4939 | 9 | lint: virtualenv | ||
4940 | 10 | .venv/bin/flake8 --exclude hooks/charmhelpers hooks | ||
4941 | 11 | @charm proof | ||
4942 | 12 | |||
4943 | 13 | test: virtualenv | ||
4944 | 14 | @echo Starting tests... | ||
4945 | 15 | @sudo apt-get install python-six | ||
4946 | 16 | @.venv/bin/nosetests --nologcapture unit_tests | ||
4947 | 17 | |||
4948 | 18 | bin/charm_helpers_sync.py: | ||
4949 | 19 | @mkdir -p bin | ||
4950 | 20 | @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \ | ||
4951 | 21 | > bin/charm_helpers_sync.py | ||
4952 | 22 | |||
4953 | 23 | sync: bin/charm_helpers_sync.py | ||
4954 | 24 | $(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml | ||
4955 | 0 | 25 | ||
4956 | === added file 'tests/trusty/mysql/README.md' | |||
4957 | --- tests/trusty/mysql/README.md 1970-01-01 00:00:00 +0000 | |||
4958 | +++ tests/trusty/mysql/README.md 2015-08-31 19:32:56 +0000 | |||
4959 | @@ -0,0 +1,133 @@ | |||
4960 | 1 | # Overview | ||
4961 | 2 | |||
4962 | 3 | [MySQL](http://www.mysql.com) is a fast, stable and true multi-user, multi-threaded SQL database server. SQL (Structured Query Language) is the most popular database query language in the world. The main goals of MySQL are speed, robustness and ease of use. | ||
4963 | 4 | |||
4964 | 5 | This charm can also deploy [Percona Server](http://www.percona.com/software/percona-server), a fork of MySQL by Percona Inc. that focuses on maximizing performance, particularly for heavy workloads. It is a drop-in replacement for MySQL and features XtraDB, a drop-in replacement for the InnoDB storage engine. | ||
4965 | 6 | |||
4966 | 7 | # Usage | ||
4967 | 8 | |||
4968 | 9 | ## General Usage | ||
4969 | 10 | |||
4970 | 11 | To deploy a MySQL service: | ||
4971 | 12 | |||
4972 | 13 | juju deploy mysql | ||
4973 | 14 | |||
4974 | 15 | Once deployed, you can retrieve the MySQL root user password by logging in to the machine via `juju ssh` and reading the `/var/lib/mysql/mysql.passwd` file. To log in to the MySQL console as the root user, you can issue the following: | ||
4975 | 16 | |||
4976 | 17 | juju ssh mysql/0 | ||
4977 | 18 | mysql -u root -p`sudo cat /var/lib/mysql/mysql.passwd` | ||
4978 | 19 | |||
4979 | 20 | ## Backups | ||
4980 | 21 | |||
4981 | 22 | The charm supports simple backups. To enable them, set the `backup_schedule` option. Optionally, you can override the default `backup_dir` and/or `backup_retention_count`: | ||
4982 | 23 | |||
4983 | 24 | juju set mysql backup_schedule="45 5 * * *" # cron formatted schedule | ||
4984 | 25 | juju set mysql backup_dir="/mnt/backup" | ||
4985 | 26 | juju set mysql backup_retention_count=28 | ||
4986 | 27 | |||
4987 | 28 | # Scale Out Usage | ||
4988 | 29 | |||
4989 | 30 | ## Replication | ||
4990 | 31 | |||
4991 | 32 | MySQL supports the ability to replicate databases to slave instances. This | ||
4992 | 33 | allows you, for example, to load balance read queries across multiple slaves or | ||
4993 | 34 | use a slave to perform backups, all whilst not impeding the master's | ||
4994 | 35 | performance. | ||
4995 | 36 | |||
4996 | 37 | To deploy a slave: | ||
4997 | 38 | |||
4998 | 39 | # deploy second service | ||
4999 | 40 | juju deploy mysql mysql-slave | ||
5000 | 41 |
Need to add blessings, ruamel.yaml, pathspec, and bundletester to the install_requires in setup.py
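For illustration only, the change suggested above might look roughly like the following in setup.py. Only the four package names come from the comment; the rest of the setup() call (project name, packaging layout, absence of version pins) is assumed here and is not taken from this proposal:

    # Sketch of the suggested setup.py change; only the four added package
    # names are taken from the review comment above, everything else is
    # illustrative.
    from setuptools import setup, find_packages

    setup(
        name='charm-tools',          # assumed name, for illustration
        packages=find_packages(),
        install_requires=[
            'blessings',             # terminal formatting
            'ruamel.yaml',           # round-trip YAML handling
            'pathspec',              # gitignore-style path matching
            'bundletester',          # charm/bundle test runner
        ],
    )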