Merge lp:~mike-amy/sahana-eden/climate into lp:sahana-eden

Proposed by Mike Amy
Status: Merged
Merged at revision: 2720
Proposed branch: lp:~mike-amy/sahana-eden/climate
Merge into: lp:sahana-eden
Diff against target: 7720 lines (+4013/-2676) (has conflicts)
55 files modified
controllers/climate.py (+159/-96)
deployment-templates/models/000_config.py (+1/-1)
models/03_gis.py (+2/-0)
models/climate.py (+5/-0)
modules/ClimateDataPortal/MapPlugin.py (+430/-0)
modules/ClimateDataPortal/__init__.py (+201/-0)
modules/ClimateDataPortal/import_NetCDF_readings.py (+131/-0)
modules/ClimateDataPortal/import_stations.py (+53/-0)
modules/ClimateDataPortal/import_tabbed_readings.py (+154/-0)
modules/s3/s3gis.py (+785/-975)
modules/test_utils/AddedRole.py (+25/-0)
modules/test_utils/Change.py (+25/-0)
modules/test_utils/ExpectSessionWarning.py (+14/-0)
modules/test_utils/ExpectedException.py (+13/-0)
modules/test_utils/InsertedRecord.py (+19/-0)
modules/test_utils/Web2pyNosePlugin.py (+106/-0)
modules/test_utils/__init__.py (+11/-1)
modules/test_utils/assert_equal.py (+60/-0)
modules/test_utils/clear_table.py (+4/-0)
modules/test_utils/find_JSON_format_data_structure.py (+54/-0)
modules/test_utils/run.py (+76/-239)
private/prepopulate/default/tasks.cfg (+0/-18)
static/scripts/S3/s3.gis.climate.js (+352/-134)
tests/climate/__init__.py (+101/-0)
tests/nose.py (+2/-2)
tests/unit_tests/gis/basic_map.html (+0/-91)
tests/unit_tests/gis/bing.html (+0/-62)
tests/unit_tests/gis/feature_queries.html (+0/-52)
tests/unit_tests/gis/google.html (+0/-67)
tests/unit_tests/gis/map_with_layers.html (+0/-150)
tests/unit_tests/gis/s3gis.py (+0/-417)
tests/unit_tests/gis/testgis.cmd (+0/-8)
tests/unit_tests/gis/true_code_paths.html (+0/-302)
tests/unit_tests/gis/yahoo.html (+0/-61)
tests/unit_tests/modules/s3/s3gis/BingLayer.py (+28/-0)
tests/unit_tests/modules/s3/s3gis/CommonScripts.py (+47/-0)
tests/unit_tests/modules/s3/s3gis/FeatureLayer.py (+25/-0)
tests/unit_tests/modules/s3/s3gis/FeatureQueries.py (+44/-0)
tests/unit_tests/modules/s3/s3gis/GPXLayer.py (+29/-0)
tests/unit_tests/modules/s3/s3gis/GeoJSONLayer.py (+28/-0)
tests/unit_tests/modules/s3/s3gis/GeoRSSLayer.py (+74/-0)
tests/unit_tests/modules/s3/s3gis/GoogleLayer.py (+84/-0)
tests/unit_tests/modules/s3/s3gis/KMLLayer.py (+70/-0)
tests/unit_tests/modules/s3/s3gis/LayerFailures.py (+117/-0)
tests/unit_tests/modules/s3/s3gis/OpenStreetMap.py (+31/-0)
tests/unit_tests/modules/s3/s3gis/TMSLayer.py (+30/-0)
tests/unit_tests/modules/s3/s3gis/TrueCodePaths.py (+307/-0)
tests/unit_tests/modules/s3/s3gis/UserInterface.py (+7/-0)
tests/unit_tests/modules/s3/s3gis/WFSLayer.py (+31/-0)
tests/unit_tests/modules/s3/s3gis/WMSLayer.py (+28/-0)
tests/unit_tests/modules/s3/s3gis/YahooLayer.py (+48/-0)
tests/unit_tests/modules/s3/s3gis/__init__.py (+68/-0)
tests/unit_tests/modules/s3/s3rest.py (+6/-0)
tests/unit_tests/modules/test_utils/find_JSON_format_data_structure.py (+52/-0)
views/climate/chart_popup.html (+76/-0)
Text conflict in modules/s3/s3gis.py
To merge this branch: bzr merge lp:~mike-amy/sahana-eden/climate
Reviewer Review Type Date Requested Status
Fran Boon Pending
Review via email: mp+74202@code.launchpad.net

This proposal supersedes a proposal from 2011-09-05.

Description of the change

Reverted the error handling in the mentioned method.
Kind of scary that there are invisible rules surrounding whether or not there is a session.

==========

Fixed merge problems, thanks flavour.

views/climate/chart_popup.html uses {{include jquery.html}}
reverted file 'private/prepopulate/default/tasks.cfg'
I think that was some strange merge behaviour as I'd added that file when there was a bug around it being '.txt'.

removed the r=request from: # self.url = "%s/%s" % (URL(r=request, c="default", f="download"),

Removed single-quotes from Python code and double-quotes from javascript code. Some double quotes in the javascript code have been left in as the generated javascript data-structures are tested by parsing as JSON, which doesn't accept single-quotes.

==========

Added NetCDF importer for climate data, improved performance of the overlay layer generation.
Fixed tests; updated them to use the "current" global variable.

To post a comment you must log in.
Revision history for this message
Fran Boon (flavour) wrote : Posted in a previous version of this proposal

views/climate/chart_popup.html
Please {{include jquery.html}} instead of hardcoding the jquery version.
You are already a point version out & we'll shortly go up to the newly released 1.6.3...

This should be reverted:
=== removed file 'private/prepopulate/default/tasks.cfg'

Can remove the r=request from:
+# self.url = "%s/%s" % (URL(r=request, c="default", f="download"),

I also see a very large number of single quotes in the Python code & some double quotes in the javascript... I'm happy to clean up the odd one that slips through, but this number seems excessive to me.

review: Needs Fixing
Revision history for this message
Fran Boon (flavour) wrote : Posted in a previous version of this proposal

Thanks for cleaning up.

self.debug isn't appropriate for use in gis.import_csv()
This function only gets used via the CLI currently.
session doesn't exist here so session.s3.debug will raise an error.
I definitely want the old behaviour - i.e. provide a warning message to the console, but don't stop what you're doing (we can choose not to db.commit() if we see the errors as fatal). No point in adding stuff to session which won't be seen.
I'm not keen on it elsewhere, but I'll accept it for now at least, but in that location it's clearly wrong.

review: Needs Fixing

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'controllers/climate.py'
--- controllers/climate.py 2011-08-06 18:24:53 +0000
+++ controllers/climate.py 2011-09-06 11:51:25 +0000
@@ -2,92 +2,14 @@
22
3module = "climate"3module = "climate"
44
5class ClimateDataMapPlugin(object):5ClimateDataPortal = local_import("ClimateDataPortal")
6 def __init__(self,6
7 data_type_option_names,7sample_type_names = ClimateDataPortal.sample_codes.keys()
8 parameter_names,8variable_names = ClimateDataPortal.tables.keys()
9 projected_option_type_names,9
10 year_min,10map_plugin = ClimateDataPortal.MapPlugin(
11 year_max11 data_type_option_names = sample_type_names,
12 ):12 parameter_names = variable_names,
13 self.data_type_option_names = data_type_option_names
14 self.parameter_names = parameter_names
15 self.projected_option_type_names = projected_option_type_names
16 self.year_min = year_min
17 self.year_max = year_max
18
19 def extend_gis_map(self, add_javascript, add_configuration):
20 add_javascript("scripts/S3/s3.gis.climate.js")
21 add_configuration(
22 SCRIPT(
23 "\n".join((
24 "registerPlugin(",
25 " new ClimateDataMapPlugin("+
26 json.dumps(
27 dict(
28 self.__dict__,
29 data_type_label = str(T("Data Type")),
30 projected_option_type_label = str(T("Projection Type"))
31 ),
32 indent = 4
33 )+
34 ")",
35 ")",
36 ))
37 )
38 )
39
40 def add_html(self, html):
41 statistics_widget = FIELDSET(
42 LEGEND("Statistics"),
43 UL(
44 _style="list-style:none",
45 *(
46 LI(
47 INPUT(
48 _type="radio",
49 _name="statistics",
50 _id="id_%s" % statistic,
51 ),
52 LABEL(
53 statistic,
54 _for="id_%s" % statistic,
55 )
56 )
57 for statistic in ["Mean", "Max", "Min"]
58 )
59 )
60 )
61
62 html.append(
63 DIV(
64 FORM(
65 _id="controller",
66 *(
67 SCRIPT(
68 _type="text/javascript",
69 *["""
70 """]
71 ),
72 climate_data_type_widget,
73 parameters_widget,
74 statistics_widget,
75 period_widget
76 )
77 )
78 )
79 )
80
81 def get_image_overlay(self, ):
82 from gluon.contenttype import contenttype
83 response.headers["Content-Type"] = contenttype(".png")
84 # @ToDo: Should be a file in static
85 return response.stream(open("/Users/mike/Desktop/red_wave.png"))
86
87climate_data_map_plugin = ClimateDataMapPlugin(
88 data_type_option_names = ["Observed", "Gridded", "Projected"],
89 parameter_names = ["Rainfall", "Temperature", "Wind", "Humidity", "Sunshine"],
90 projected_option_type_names = ["RC Model", "GC Model", "Scenario"],
91 year_max = datetime.date.today().year,13 year_max = datetime.date.today().year,
92 year_min = 1960,14 year_min = 1960,
93)15)
@@ -120,16 +42,16 @@
120 print_tool = {"url": print_service}42 print_tool = {"url": print_service}
121 else:43 else:
122 print_tool = {}44 print_tool = {}
12345
124 map = gis.show_map(46 map = gis.show_map(
47 lat = 28.5,
48 lon = 84.1,
49 zoom = 7,
125 toolbar = False,50 toolbar = False,
126 catalogue_toolbar=catalogue_toolbar, # T/F, top tabs toolbar51# catalogue_toolbar=catalogue_toolbar, # T/F, top tabs toolbar
127 wms_browser = wms_browser, # dict52 wms_browser = wms_browser, # dict
128 catalogue_layers=catalogue_layers, # T/F
129 mouse_position = deployment_settings.get_gis_mouse_position(),
130 print_tool = print_tool,
131 plugins = [53 plugins = [
132 climate_data_map_plugin54 map_plugin
133 ]55 ]
134 )56 )
13557
@@ -138,7 +60,148 @@
138 module_name=module_name,60 module_name=module_name,
139 map=map61 map=map
140 )62 )
14163
142def climate_image_overlay():64month_names = dict(
143 return climate_data_map_plugin.get_image_overlay()65 January=1,
14466 February=2,
67 March=3,
68 April=4,
69 May=5,
70 June=6,
71 July=7,
72 August=8,
73 September=9,
74 October=10,
75 November=11,
76 December=12
77)
78
79for name, number in month_names.items():
80 month_names[name[:3]] = number
81for name, number in month_names.items():
82 month_names[name.upper()] = number
83for name, number in month_names.items():
84 month_names[name.lower()] = number
85
86def convert_date(default_month):
87 def converter(year_month):
88 components = year_month.split("-")
89 year = int(components[0])
90 assert 1960 <= year, "year must be >= 1960"
91
92 try:
93 month_value = components[1]
94 except IndexError:
95 month = default_month
96 else:
97 try:
98 month = int(month_value)
99 except TypeError:
100 month = month_names[month_value]
101
102 assert 1 <= month <= 12, "month must be in range 1:12"
103 return datetime.date(year, month, 1)
104 return converter
105
106def one_of(options):
107 def validator(choice):
108 assert choice in options, "should be one of %s, not '%s'" % (
109 options,
110 choice
111 )
112 return choice
113 return validator
114
115def climate_overlay_data():
116 kwargs = dict(request.vars)
117 kwargs["parameter"] = kwargs["parameter"].replace("+", " ")
118
119 arguments = {}
120 errors = []
121 for kwarg_name, converter in dict(
122 data_type = one_of(sample_type_names),
123 statistic = one_of(("Maximum", "Minimum", "Average")),
124 parameter = one_of(variable_names),
125 from_date = convert_date(default_month = 1),
126 to_date = convert_date(default_month = 12),
127 ).iteritems():
128 try:
129 value = kwargs.pop(kwarg_name)
130 except KeyError:
131 errors.append("%s missing" % kwarg_name)
132 else:
133 try:
134 arguments[kwarg_name] = converter(value)
135 except TypeError:
136 errors.append("%s is wrong type" % kwarg_name)
137 except AssertionError, assertion_error:
138 errors.append("%s: %s" % (kwarg_name, assertion_error))
139 if kwargs:
140 errors.append("Unexpected arguments: %s" % kwargs.keys())
141
142 if errors:
143 raise HTTP(500, "<br />".join(errors))
144 else:
145 import gluon.contenttype
146 data_path = map_plugin.get_overlay_data(
147 env = Storage(globals()),
148 **arguments
149 )
150 return response.stream(
151 open(data_path,"rb"),
152 chunk_size=4096
153 )
154
155def list_of(converter):
156 def convert_list(choices):
157 return map(converter, choices)
158 return convert_list
159
160def climate_chart():
161 kwargs = dict(request.vars)
162 import simplejson as JSON
163 specs = JSON.loads(kwargs.pop("spec"))
164
165 checked_specs = []
166 for spec in specs:
167 arguments = {}
168 errors = []
169 for name, converter in dict(
170 data_type = one_of(sample_type_names),
171 parameter = one_of(variable_names),
172 from_date = convert_date(default_month = 1),
173 to_date = convert_date(default_month = 12),
174 place_ids = list_of(int)
175 ).iteritems():
176 try:
177 value = spec.pop(name)
178 except KeyError:
179 errors.append("%s missing" % name)
180 else:
181 try:
182 arguments[name] = converter(value)
183 except TypeError:
184 errors.append("%s is wrong type" % name)
185 except AssertionError, assertion_error:
186 errors.append("%s: %s" % (name, assertion_error))
187 if spec:
188 errors.append("Unexpected arguments: %s" % spec.keys())
189 checked_specs.append(arguments)
190
191 if errors:
192 raise HTTP(500, "<br />".join(errors))
193 else:
194 import gluon.contenttype
195 response.headers["Content-Type"] = gluon.contenttype.contenttype(".png")
196 data_image_file_path = map_plugin.render_plots(
197 env = Storage(globals()),
198 specs = checked_specs
199 )
200 return response.stream(
201 open(data_image_file_path,"rb"),
202 chunk_size=4096
203 )
204
205def chart_popup():
206 return {}
207
145208
=== modified file 'deployment-templates/models/000_config.py'
--- deployment-templates/models/000_config.py 2011-08-25 09:17:02 +0000
+++ deployment-templates/models/000_config.py 2011-09-06 11:51:25 +0000
@@ -249,7 +249,7 @@
249 strict_hierarchy = False,249 strict_hierarchy = False,
250 # Should all specific locations (e.g. addresses, waypoints) be required to250 # Should all specific locations (e.g. addresses, waypoints) be required to
251 # link to where they are in the location hierarchy?251 # link to where they are in the location hierarchy?
252 location_parent_required = False,252 location_parent_required = False
253)253)
254# Set this if there will be multiple areas in which work is being done,254# Set this if there will be multiple areas in which work is being done,
255# and a menu to select among them is wanted. With this on, any map255# and a menu to select among them is wanted. With this on, any map
256256
=== modified file 'models/03_gis.py'
--- models/03_gis.py 2011-09-05 22:10:34 +0000
+++ models/03_gis.py 2011-09-06 11:51:25 +0000
@@ -1369,6 +1369,8 @@
1369# =============================================================================1369# =============================================================================
1370def gis_map_tables():1370def gis_map_tables():
1371 """ Load the GIS Map Tables when needed """1371 """ Load the GIS Map Tables when needed """
1372 if "gis_layer_bing" in db.tables:
1373 return
13721374
1373 # -------------------------------------------------------------------------1375 # -------------------------------------------------------------------------
1374 # GPS Waypoints1376 # GPS Waypoints
13751377
=== added file 'models/climate.py'
--- models/climate.py 1970-01-01 00:00:00 +0000
+++ models/climate.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,5 @@
1# -*- coding: utf-8 -*-
2
3module = "climate"
4if deployment_settings.has_module(module):
5 local_import("ClimateDataPortal").define_models(env = Storage(globals()))
06
=== added directory 'modules/ClimateDataPortal'
=== added file 'modules/ClimateDataPortal/MapPlugin.py'
--- modules/ClimateDataPortal/MapPlugin.py 1970-01-01 00:00:00 +0000
+++ modules/ClimateDataPortal/MapPlugin.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,430 @@
1
2# notes:
3
4# dependencies:
5# R
6
7# create folder for cache:
8# mkdir -p /tmp/climate_data_portal/images/recent/
9# mkdir -p /tmp/climate_data_portal/images/older/
10
11MAX_CACHE_FOLDER_SIZE = 2**24 # 16 MiB
12
13class TwoStageCache(object):
14 def __init__(self, folder, max_size):
15 self.folder = folder
16 self.max_size
17
18 def purge(self):
19 pass
20
21 def retrieve(self, file_name, generate_if_not_found):
22 pass
23
24import os, errno
25
26def mkdir_p(path):
27 try:
28 os.makedirs(path)
29 except OSError as exc: # Python >2.5
30 if exc.errno == errno.EEXIST:
31 pass
32 else: raise
33
34def define(env, place, tables, date_to_month_number, sample_codes, exports):
35 # This starts an R interpreter.
36 # As we are sharing it (restarting it every time is inefficient),
37 # we have to be somewhat careful to make sure objects are garbage collected
38 # better to just not stick anything in R's globals
39 try:
40 import rpy2.robjects as robjects
41 except ImportError:
42 import logging
43 logging.getLogger().error(
44"""R is required by the climate data portal to generate charts
45
46To install R: refer to:
47http://cran.r-project.org/doc/manuals/R-admin.html
48
49
50rpy2 is required to interact with python.
51
52To install rpy2, refer to:
53http://rpy.sourceforge.net/rpy2/doc-dev/html/overview.html
54""")
55 raise
56
57 R = robjects.r
58
59 from rpy2.robjects.packages import importr
60
61 base = importr("base")
62
63 from math import fsum
64 def average(values):
65 "Safe float average"
66 l = len(values)
67 if l is 0:
68 return None
69 else:
70 return fsum(values)/l
71
72 class Maximum(object):
73 def __init__(self, column, add_query_term):
74 self.value_max = value_max = column.max()
75 add_query_term(value_max)
76
77 def __call__(self, row):
78 return row._extra[self.value_max]
79
80 class Minimum(object):
81 def __init__(self, column, add_query_term):
82 self.value_min = value_min = column.min()
83 add_query_term(value_min)
84
85 def __call__(self, row):
86 return row._extra[self.value_min]
87
88 class Average(object):
89 def __init__(self, column, add_query_term):
90 self.value_sum = value_sum = column.sum()
91 self.value_count = value_count = column.count()
92 add_query_term((
93 value_sum,
94 value_count
95 ))
96
97 def __call__(self, row):
98 return row._extra[self.value_sum] / row._extra[self.value_count]
99
100 aggregators = {
101 "Maximum": Maximum,
102 "Minimum": Minimum,
103 "Average": Average
104 }
105
106 def get_cached_or_generated_file(cache_file_name, generate):
107 from os.path import join, exists
108 from os import stat, makedirs
109 # this needs to become a setting
110 climate_data_image_cache_path = join(
111 "/tmp","climate_data_portal","images"
112 )
113 recent_cache = join(climate_data_image_cache_path, "recent")
114 mkdir_p(recent_cache)
115 older_cache = join(climate_data_image_cache_path, "older")
116 mkdir_p(older_cache)
117 recent_cache_path = join(recent_cache, cache_file_name)
118 if not exists(recent_cache_path):
119 older_cache_path = join(older_cache, cache_file_name)
120 if exists(older_cache_path):
121 # move the older cache to the recent folder
122 rename(older_cache_path, recent_cache_path)
123 else:
124 generate(recent_cache_path)
125 file_path = recent_cache_path
126
127 # update the folder size file (race condition?)
128 folder_size_file_path = join(climate_data_image_cache_path, "size")
129 folder_size_file = open(folder_size_file_path, "w+")
130 folder_size_file_contents = folder_size_file.read()
131 try:
132 folder_size = int(folder_size_file_contents)
133 except ValueError:
134 folder_size = 0
135 folder_size_file.seek(0)
136 folder_size_file.truncate()
137 folder_size += stat(file_path).st_size
138 if folder_size > MAX_CACHE_FOLDER_SIZE:
139 rmdir(older_cache)
140
141 folder_size_file.write(str(folder_size))
142 folder_size_file.close()
143 else:
144 # use the existing cached image
145 file_path = recent_cache_path
146 return file_path
147
148 class MapPlugin(object):
149 def __init__(
150 self,
151 data_type_option_names,
152 parameter_names,
153 year_min,
154 year_max
155 ):
156 self.data_type_option_names = data_type_option_names
157 self.parameter_names = parameter_names
158 self.year_min = year_min
159 self.year_max = year_max
160
161 def extend_gis_map(self, add_javascript, add_configuration):
162 add_javascript("scripts/S3/s3.gis.climate.js")
163 SCRIPT = env.SCRIPT
164 T = env.T
165 import json
166
167 add_configuration(
168 SCRIPT(
169 "\n".join((
170 "registerPlugin(",
171 " new ClimateDataMapPlugin("+
172 json.dumps(
173 dict(
174 data_type_option_names = self.data_type_option_names,
175 parameter_names = self.parameter_names,
176 year_min = self.year_min,
177 year_max = self.year_max,
178 overlay_data_URL = "/%s/climate/climate_overlay_data" % (
179 env.request.application
180 ),
181 chart_URL = "/%s/climate/climate_chart" % (
182 env.request.application
183 ),
184 data_type_label = str(T("Data Type")),
185 projected_option_type_label = str(
186 T("Projection Type")
187 )
188 ),
189 indent = 4
190 )+
191 ")",
192 ")",
193 ))
194 )
195 )
196
197
198 def get_overlay_data(
199 self,
200 env,
201 data_type,
202 parameter,
203 from_date,
204 to_date,
205 statistic
206 ):
207 from_month = date_to_month_number(from_date)
208 to_month = date_to_month_number(to_date)
209 def generate_map_overlay_data(file_path):
210 # generate the new file in the recent folder
211
212 db = env.db
213 sample_table_name, sample_table = tables[parameter]
214 place = db.place
215 #sample_table = db[sample_table_name]
216
217 query = [
218 place.id,
219 place.longitude,
220 place.latitude,
221 ]
222 aggregator = aggregators[statistic](
223 sample_table.value,
224 query.append
225 )
226
227 sample_rows = db(
228 (sample_table.time_period >= from_month) &
229 (sample_table.time_period <= to_month) &
230 (sample_table.sample_type == sample_codes[data_type]) &
231 (place.id == sample_table.place_id)
232 ).select(
233 *query,
234 groupby=sample_table.place_id
235 )
236
237 # map positions to data
238 # find max and min value
239 positions = {}
240 aggregated_values = []
241 for row in sample_rows:
242 place = row.place
243 aggregated_value = aggregator(row)
244 aggregated_values.append(aggregated_value)
245 positions[place.id] = (
246 place.latitude,
247 place.longitude,
248 aggregated_value
249 )
250 max_aggregated_value = max(aggregated_values)
251 min_aggregated_value = min(aggregated_values)
252 aggregated_range = max_aggregated_value - min_aggregated_value
253
254 data_lines = []
255 write = data_lines.append
256 from colorsys import hsv_to_rgb
257 for id, (lat, lon, aggregated_value) in positions.iteritems():
258 north = lat + 0.05
259 south = lat - 0.05
260 east = lon + 0.05
261 west = lon - 0.05
262 # only hue changes
263 # hue range is from 2/3 (blue, low) to 0 (red, high)
264 normalised_value = 1.0-((aggregated_value - min_aggregated_value) / aggregated_range)
265 r,g,b = hsv_to_rgb(normalised_value *(2.0/3.0), 1.0, 1.0)
266 hex_colour = "%02x%02x%02x" % (r*255, g*255, b*255)
267 write(
268 "Vector("
269 "Polygon(["
270 "LinearRing(["
271 "Point(%(north)f,%(west)f),"
272 "Point(%(north)f,%(east)f),"
273 "Point(%(south)f,%(east)f),"
274 "Point(%(south)f,%(west)f)"
275 "])"
276 "]),"
277 "{"
278 "value:%(aggregated_value)f,"
279 "id:%(id)i"
280 "},"
281 "{"
282 "fillColor:'#%(hex_colour)s'"
283 "}"
284 ")," % locals()
285 )
286 overlay_data_file = open(file_path, "w")
287 write = overlay_data_file.write
288 write("{")
289 if max_aggregated_value < 10:
290 float_format = "%0.2f"
291 if max_aggregated_value < 100:
292 float_format = "%0.1f"
293 elif max_aggregated_value < 10000:
294 float_format = "%0.0f"
295 else:
296 float_format = "%0.2e"
297 write("max:%s," % float_format % max_aggregated_value)
298 write("min:%s," % float_format % min_aggregated_value)
299 write("features:[")
300 write("".join(data_lines))
301 overlay_data_file.seek(-1, 1) # delete last ",'
302 write("]}")
303 overlay_data_file.close()
304
305 return get_cached_or_generated_file(
306 "_".join((
307 statistic,
308 data_type,
309 parameter,
310 str(from_month),
311 str(to_month),
312 ".js"
313 )),
314 generate_map_overlay_data
315 )
316
317 def render_plots(
318 self,
319 env,
320 specs
321 ):
322 def generate_chart(file_path):
323 def render_plot(
324 data_type,
325 parameter,
326 from_date,
327 to_date,
328 place_ids
329 ):
330 from_month = date_to_month_number(from_date)
331 to_month = date_to_month_number(to_date)
332
333 db = env.db
334 sample_table_name, sample_table = tables[parameter]
335 place = db.place
336 #sample_table = db[sample_table_name]
337 sample_rows = db(
338 (sample_table.time_period >= from_month) &
339 (sample_table.time_period <= to_month) &
340 (sample_table.sample_type == sample_codes[data_type]) &
341 (sample_table.place_id.belongs(place_ids))
342 ).select(
343 sample_table.value,
344 sample_table.time_period,
345 )
346
347 # coalesce values by time_period:
348 aggregated_values = {}
349 for sample_row in sample_rows:
350 time_period = sample_row.time_period
351 value = sample_row.value
352 try:
353 aggregated_values[time_period]
354 except KeyError:
355 aggregated_values[time_period] = value
356 else:
357 aggregated_values[time_period] += value
358
359 values = []
360 time_periods = aggregated_values.keys()
361 time_periods.sort()
362 for time_period in time_periods:
363 values.append(aggregated_values[time_period])
364 return from_date, to_date, data_type, parameter, values
365
366 time_serieses = []
367 c = R("c")
368 for spec in specs:
369 from_date, to_date, data_type, parameter, values = render_plot(**spec)
370 time_serieses.append(
371 R("ts")(
372 robjects.FloatVector(values),
373 start = c(from_date.year, from_date.month),
374 end = c(to_date.year, to_date.month),
375 frequency = 12
376 )
377 )
378
379 R("png(filename = '%s', width=640, height=480)" % file_path)
380 plot_chart = R(
381 "function (xlab, ylab, n, ...) {"
382 "ts.plot(...,"
383 "gpars=list(xlab=xlab, ylab=ylab, col=c(1:n))"
384 ")"
385 "}"
386 )
387
388 plot_chart(
389 "Date",
390 "Combined %s %s" % (data_type, parameter),
391 len(time_serieses),
392 *time_serieses
393 )
394 R("dev.off()")
395
396 import md5
397 import gluon.contrib.simplejson as JSON
398
399 import datetime
400 def serialiseDate(obj):
401 if isinstance(
402 obj,
403 (
404 datetime.date,
405 datetime.datetime,
406 datetime.time
407 )
408 ):
409 return obj.isoformat()[:19].replace("T"," ")
410 raise TypeError("%r is not JSON serializable" % (obj,))
411
412 return get_cached_or_generated_file(
413 "_".join((
414 md5.md5(
415 JSON.dumps(
416 specs,
417 sort_keys=True,
418 default=serialiseDate
419 )
420 ).hexdigest(),
421 ".png"
422 )),
423 generate_chart
424 )
425
426 exports.update(
427 MapPlugin = MapPlugin
428 )
429
430 del globals()["define"]
0431
=== added file 'modules/ClimateDataPortal/__init__.py'
--- modules/ClimateDataPortal/__init__.py 1970-01-01 00:00:00 +0000
+++ modules/ClimateDataPortal/__init__.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,201 @@
1
2"""
3 Climate Data Module
4
5 @author: Mike Amy
6"""
7
8# datasets are stored in actual tables
9# - e.g. rainfall_mm
10
11# data collection points in dataset
12# values at a point within a time range
13
14# e.g. observed temperature in Kathmandu between Feb 2006 - April 2007
15
16
17sample_types = dict(
18 O = "Observed",
19 G = "Gridded",
20
21 r = "Projected (RC)",
22 g = "Projected (GC)",
23 s = "Scenario",
24)
25
26sample_codes = {}
27
28import re
29for code, name in sample_types.iteritems():
30 globals()[re.sub("\W", "", name)] = code
31 sample_codes[name] = code
32
33
34# Until I figure out how to sanely import things from web2py,
35# apply a prophylactic import method...
36def define_models(env):
37 """
38 Define Climate Data models.
39 """
40 db = env.db
41 Field = env.Field
42
43 def create_index(table_name, field_name):
44 db.executesql(
45 """
46 CREATE INDEX IF NOT EXISTS
47 "index_%(table_name)s__%(field_name)s"
48 ON "%(table_name)s" ("%(field_name)s");
49 """ % locals()
50 )
51
52 place = db.define_table(
53 "place",
54 Field(
55 "longitude",
56 "double",
57 notnull=True,
58 required=True,
59 ),
60 Field(
61 "latitude",
62 "double",
63 notnull=True,
64 required=True,
65 )
66 )
67
68 # not all places are stations with elevations
69 # as in the case of "gridded" data
70 # a station can only be in one place
71 observation_station = db.define_table(
72 "observation_station",
73 Field(
74 "id",
75 "id", # must be a place,
76 notnull=True,
77 required=True,
78 ),
79 Field(
80 "name",
81 "string",
82 notnull=True,
83 unique=False,
84 required=True,
85 ),
86 Field(
87 "elevation_metres",
88 "integer"
89 )
90 )
91
92 def sample_table(name, value_type):
93 table = db.define_table(
94 name,
95 Field(
96 "sample_type",
97 "string",
98 length = 1,
99 notnull=True,
100 # necessary as web2py requires a default value even for
101 # not null fields
102 default="-1",
103 required=True
104 ),
105 Field(
106 "time_period",
107 "integer",
108 notnull=True,
109 default=-1000,
110 required=True
111 ),
112 Field(
113 # this should become a GIS field
114 "place_id",
115 place,
116 notnull=True,
117 required=True
118 ),
119 Field(
120 "value",
121 value_type,
122 notnull = True,
123 required=True,
124 ),
125 )
126
127 create_index(name, "id")
128 create_index(name, "sample_type")
129 create_index(name, "time_period")
130 create_index(name, "place_id")
131
132 return table
133
134 rainfall_mm = sample_table("climate_rainfall_mm", "double")
135 min_temperature_celsius = sample_table("climate_min_temperature_celsius", "double")
136 max_temperature_celsius = sample_table("climate_max_temperature_celsius", "double")
137
138 tables = {
139 "Rainfall mm": ("climate_rainfall_mm", rainfall_mm),
140 "Max Temperature C": ("climate_max_temperature_celsius", max_temperature_celsius),
141 "Min Temperature C": ("climate_min_temperature_celsius", min_temperature_celsius),
142 }
143
144 def year_month_to_month_number(year, month):
145 """Time periods are integers representing months in years,
146 from 1960 onwards.
147
148 e.g. 0 = Jan 1960, 1 = Feb 1960, 12 = Jan 1961
149
150 This function converts a year and month to a month number.
151 """
152 return ((year-1960) * 12) + (month-1)
153
154 def date_to_month_number(date):
155 """This function converts a date to a month number.
156
157 See also year_month_to_month_number(year, month)
158 """
159 return year_month_to_month_number(date.year, date.month)
160
161# def month_number_to_date(month_number):
162# ret
163
164 from .MapPlugin import define
165 define(
166 env,
167 place,
168 tables,
169 date_to_month_number,
170 sample_codes,
171 globals()
172 )
173
174 # exports:
175 globals().update(
176 sample_types = sample_types,
177
178 place = place,
179 observation_station = observation_station,
180
181 tables = tables,
182
183 rainfall_mm = rainfall_mm,
184 max_temperature_celsius = max_temperature_celsius,
185 min_temperature_celsius = min_temperature_celsius,
186
187 date_to_month_number = date_to_month_number,
188 year_month_to_month_number = year_month_to_month_number,
189 )
190
191 def redefine_models(env):
192 # avoid risking insidious aliasing bugs
193 # by not defining things more than once
194 env.db.update(
195 climate_rainfall_mm = rainfall_mm,
196 climate_max_temperature_celsius = max_temperature_celsius,
197 climate_min_temperature_celsius = min_temperature_celsius,
198 place = place,
199 observation_station = observation_station,
200 )
201 globals()["define_models"] = redefine_models
0202
=== added file 'modules/ClimateDataPortal/import_NetCDF_readings.py'
--- modules/ClimateDataPortal/import_NetCDF_readings.py 1970-01-01 00:00:00 +0000
+++ modules/ClimateDataPortal/import_NetCDF_readings.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,131 @@
1
2ClimateDataPortal = local_import("ClimateDataPortal")
3
4
def get_or_create(mapping, key, creator):
    """Return mapping[key], first inserting creator() if the key is absent.

    Like dict.setdefault, except the default value is built lazily:
    creator() is only called on a miss.

    (The first parameter was renamed from `dict`, which shadowed the
    builtin; every call site in this file passes arguments positionally,
    so the rename is backward compatible.)
    """
    try:
        value = mapping[key]
    except KeyError:
        value = mapping[key] = creator()
    return value
11
def get_or_create_record(table, query):
    """Return the id of the single row of `table` matching `query`.

    query is a dict of {field_name: value}; the terms are ANDed together.
    If no row matches, one is inserted (and committed) with exactly those
    values.  Asserts that at most one row matches.

    NOTE(review): on the insert path, web2py's table.insert() returns the
    new row id rather than a row object, so `record.id` relies on the
    DAL Reference behaving like a record -- confirm against the web2py
    version in use.
    """
    query_terms = []
    for key, value in query.iteritems():  # Python 2 dict iteration
        query_terms.append(getattr(table, key) == value)
    # Fold the per-field comparisons into one ANDed DAL query.
    reduced_query = reduce(
        (lambda left, right: left & right),
        query_terms
    )
    records = db(reduced_query).select()
    count = len(records)
    assert count <= 1, "Multiple records for %s" % query
    if count == 0:
        record = table.insert(**query)
        db.commit()  # commit immediately so interrupted imports keep the row
    else:
        record = records.first()
    return record.id
29
def nearly(expected_float, actual_float):
    """Return True when actual_float is within 0.1% of expected_float.

    Bug fix: the original chained comparison
        (expected * 0.999) < actual < (expected * 1.001)
    was never true for negative expected values (multiplying a negative
    bound by 0.999 raises it instead of lowering it) and never true for
    an expected value of exactly 0.  Since this module compares minimum
    temperatures, negative values are routine.  Comparing the absolute
    difference handles all signs, and accepts an exact match at zero.
    """
    return abs(actual_float - expected_float) <= abs(expected_float) * 0.001
32
def add_reading_if_none(
    database_table,
    sample_type,
    time_period,
    place_id,
    value
):
    """Insert a reading unless one already exists for this key.

    The key is (sample_type, time_period, place_id).  When a reading is
    already stored it must agree with `value` to within the tolerance of
    nearly(); otherwise an AssertionError reports the conflict.
    """
    matching = db(
        (database_table.sample_type == sample_type) &
        (database_table.time_period == time_period) &
        (database_table.place_id == place_id)
    ).select(database_table.value, database_table.id)
    count = len(matching)
    assert count <= 1
    if count == 1:
        stored = matching.first()
        assert nearly(stored.value, value), (stored.value, value, place_id)
    else:
        database_table.insert(
            sample_type = sample_type,
            time_period = time_period,
            place_id = place_id,
            value = value
        )
57
58
59
60import datetime
61
def import_climate_readings(
    netcdf_file,
    database_table,
    add_reading,
    start_time = datetime.date(1971,1,1),
    is_undefined = lambda x: -99.900003 < x < -99.9
):
    """
    Read a gridded time/lat/lon NetCDF dataset into database_table.

    netcdf_file: an open NetCDF file with "time", "lat", "lon" and "tt"
        variables; "tt" is indexed [time][lat][lon].
    add_reading: callable(database_table, sample_type, time_period,
        place_id, value) that stores one reading.
    start_time: the epoch the file's "time" variable (hours) counts from.
    is_undefined: predicate marking missing values.  The default range
        catches -99.9 as stored in a 32-bit float (-99.90000152...),
        presumably the file's fill value -- TODO confirm.

    Assumptions:
    * there are no places yet (one place row is created per grid point)
    * the data is in order of places
    """
    variables = netcdf_file.variables

    # Create the grid of places: one place row per (lat, lon) point,
    # remembering each id for the reading inserts below.
    place_ids = {}

    def to_list(variable):
        # NetCDF variables are not plain sequences; copy into a list.
        result = []
        for i in range(len(variable)):
            result.append(variable[i])
        return result

    def iter_pairs(list):
        # Like enumerate().  NOTE(review): parameter shadows builtin list.
        for index in range(len(list)):
            yield index, list[index]

    times = to_list(variables["time"])
    lat = to_list(variables["lat"])
    lon = to_list(variables["lon"])
    for latitude in lat:
        for longitude in lon:
            record = get_or_create_record(
                ClimateDataPortal.place,
                dict(
                    longitude = longitude,
                    latitude = latitude
                )
            )
            place_ids[(latitude, longitude)] = record

    tt = variables["tt"]
    print "up to:", len(times)
    for time_index, time in iter_pairs(times):
        print time_index
        # "time" is hours since start_time; later collapsed to a month number.
        time_period = start_time+datetime.timedelta(hours=time)
        for latitude_index, latitude in iter_pairs(lat):
            for longitude_index, longitude in iter_pairs(lon):
                value = tt[time_index][latitude_index][longitude_index]
                if not is_undefined(value):
                    add_reading(
                        database_table = database_table,
                        sample_type = ClimateDataPortal.Gridded,
                        time_period = ClimateDataPortal.date_to_month_number(time_period),
                        place_id = place_ids[(latitude, longitude)],
                        value = value
                    )
        # Commit once per time slice to bound transaction size.
        db.commit()
121
import sys

from Scientific.IO import NetCDF

# Script entry point -- run inside web2py so db and local_import are in
# scope; argv[1] is the path of the NetCDF file to import.
file_name = sys.argv[1]
import_climate_readings(
    NetCDF.NetCDFFile(file_name),
    # NOTE(review): hard-wired to the minimum-temperature table; edit here
    # (or parameterize) to import a different variable.
    ClimateDataPortal.min_temperature_celsius,
    add_reading_if_none
)
0132
=== added file 'modules/ClimateDataPortal/import_stations.py'
--- modules/ClimateDataPortal/import_stations.py 1970-01-01 00:00:00 +0000
+++ modules/ClimateDataPortal/import_stations.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,53 @@
1
2ClimateDataPortal = local_import("ClimateDataPortal")
3
4from decimal import Decimal
5
6def import_stations(file_name):
7 """
8 Expects a file containing lines of the form e.g.:
9226 JALESORE 1122 172 26.65 85.78
10275 PHIDIM (PANCHTH 1419 1205 27.15 87.75
11unused Station name <-id <-elev <-lat <-lon
120123456789012345678901234567890123456789012345678901234567890123456789
130 1 2 3 4 5 6
14 """
15 place = ClimateDataPortal.place
16 observation_station = ClimateDataPortal.observation_station
17 observation_station.truncate()
18 place.truncate()
19 db.commit()
20
21 for line in open(file_name, "r").readlines():
22 try:
23 place_id_text = line[27:33]
24 except IndexError:
25 continue
26 else:
27 try:
28 place_id = int(place_id_text)
29 except ValueError:
30 continue
31 else:
32 station_name = line[8:25].strip() # don't restrict if they add more
33 elevation_metres = int(line[37:43])
34
35 latitude = Decimal(line[47:53])
36 longitude = Decimal(line[57:623])
37
38 assert place.insert(
39 id = place_id,
40 longitude = longitude,
41 latitude = latitude
42 ) == place_id
43
44 station_id = observation_station.insert(
45 id = place_id,
46 name = station_name,
47 elevation_metres = elevation_metres
48 )
49 print place_id, station_name, latitude, longitude, elevation_metres
50 db.commit()
51
import sys
# Script entry point -- run inside web2py (db, local_import in scope);
# argv[1] is the fixed-width stations file to import.
import_stations(sys.argv[1])
054
=== added file 'modules/ClimateDataPortal/import_tabbed_readings.py'
--- modules/ClimateDataPortal/import_tabbed_readings.py 1970-01-01 00:00:00 +0000
+++ modules/ClimateDataPortal/import_tabbed_readings.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,154 @@
1
2ClimateDataPortal = local_import("ClimateDataPortal")
3
4from decimal import Decimal
5
6
def get_or_create(mapping, key, creator):
    """Return mapping[key], first inserting creator() if the key is absent.

    Like dict.setdefault, except the default value is built lazily:
    creator() is only called on a miss.

    (The first parameter was renamed from `dict`, which shadowed the
    builtin; every call site in this file passes arguments positionally,
    so the rename is backward compatible.)
    """
    try:
        value = mapping[key]
    except KeyError:
        value = mapping[key] = creator()
    return value
13
14import os
15
class Readings(object):
    """Accumulates daily readings per month and writes monthly means.

    On construction, any previously imported "Observed" rows in the
    target table are deleted.  add_reading() buffers values per time
    period (values equal to null_value are dropped; out-of-range values
    are reported through a callback instead of being stored).  done()
    averages each period's buffer and inserts one row per month for the
    given place.
    """
    def __init__(
        self,
        database_table,
        null_value,
        maximum = None,
        minimum = None
    ):
        self.database_table = database_table
        # Start from a clean slate for observed data in this table.
        db(database_table.sample_type == ClimateDataPortal.Observed).delete()
        self.null_value = null_value
        self.maximum = maximum
        self.minimum = minimum

        # {time_period (month number): [reading, ...]}
        self.aggregated_values = {}

    def add_reading(self, time_period, reading, out_of_range):
        # Null markers are silently dropped.
        if reading == self.null_value:
            return
        below_minimum = self.minimum is not None and reading < self.minimum
        above_maximum = self.maximum is not None and reading > self.maximum
        if below_minimum or above_maximum:
            out_of_range(reading)
        else:
            self.aggregated_values.setdefault(time_period, []).append(reading)

    def done(self, place_id):
        # Write one averaged row per buffered month.
        for month_number, values in self.aggregated_values.iteritems():
            self.database_table.insert(
                sample_type = ClimateDataPortal.Observed,
                time_period = month_number,
                place_id = place_id,
                value = sum(values) / len(values)
            )
55
56import datetime
57
58
59def import_tabbed_readings(
60 folder_name,
61 variables = [],
62 place_ids = None
63):
64 """
65 Expects a folder containing files with name rtXXXX.txt
66
67 each file contains lines of the form e.g.:
681978\t1\t1\t0\t-99.9\t-99.9
69
70representing year, month, day, rainfall(mm), minimum and maximum temperature
71 """
72 observation_station = ClimateDataPortal.observation_station
73
74 null_value = Decimal("-99.9") # seems to be
75
76 for row in db(observation_station).select(observation_station.id):
77 place_id = row.id
78 if place_ids is not None:
79 # avoid certain place ids (to allow importing particular places)
80 start_place, end_place = map(int, place_ids.split(":"))
81 assert start_place <= end_place
82 if place_id < start_place or place_id > end_place:
83 continue
84 print place_id
85
86 data_file_path = os.path.join(folder_name, "rt%04i.txt" % place_id)
87 if not os.path.exists(data_file_path):
88 print "%s not found" % data_file_path
89 else:
90 try:
91 for line in open(data_file_path, "r").readlines():
92 if line:
93 data = line.split()
94 if data:
95 try:
96 year = int(data[0])
97 month = int(data[1])
98 day = int(data[2])
99
100 time_period = ClimateDataPortal.year_month_to_month_number(year, month)
101
102 for variable, reading_data in zip(
103 variables,
104 data[3:6]
105 ):
106 def out_of_range(reading):
107 print "%s/%s/%s: %s out of range" % (
108 day, month, year, reading
109 )
110 reading = Decimal(reading_data)
111 variable.add_reading(
112 time_period,
113 reading,
114 out_of_range = out_of_range
115 )
116
117 except Exception, exception:
118 print exception
119 for variable in variables:
120 variable.done(place_id)
121 except:
122 print line
123 raise
124
125 db.commit()
126 else:
127 print "No stations!"
128
import sys

null_value = Decimal("-99.9")

# Script entry point -- run inside web2py (db, local_import in scope).
# argv[1]: folder of rtXXXX.txt files
# argv[2] (optional): "start:end" range of place ids to import
#
# Bug fix: the original passed sys.argv[2:] (a *list*), which would have
# crashed inside import_tabbed_readings at place_ids.split(":"); the
# function expects a single "start:end" string.
import_tabbed_readings(
    folder_name = sys.argv[1],
    variables = [
        Readings(
            ClimateDataPortal.rainfall_mm,
            null_value = null_value,
            minimum = 0,
        ),
        Readings(
            database_table = ClimateDataPortal.min_temperature_celsius,
            null_value = null_value,
            minimum = -120,
            maximum = 55
        ),
        Readings(
            database_table = ClimateDataPortal.max_temperature_celsius,
            null_value = null_value,
            minimum = -120,
            maximum = 55
        ),
    ],
    place_ids = sys.argv[2] if len(sys.argv) > 2 else None
)
0155
=== modified file 'modules/s3/s3gis.py'
--- modules/s3/s3gis.py 2011-09-05 22:18:45 +0000
+++ modules/s3/s3gis.py 2011-09-06 11:51:25 +0000
@@ -74,16 +74,11 @@
74 Provide an easy, safe, systematic way of handling Debug output74 Provide an easy, safe, systematic way of handling Debug output
75 (print to stdout doesn't work with WSGI deployments)75 (print to stdout doesn't work with WSGI deployments)
76 """76 """
77 try:77 # should be using python's built-in logging module
78 output = "S3 Debug: %s" % str(message)78 output = u"S3 Debug: %s" % unicode(message)
79 if value:79 if value:
80 output += ": %s" % str(value)80 output += u": %s" % unicode(value)
81 except:81 sys.stderr.write(output+"\n")
82 output = "S3 Debug: %s" % unicode(message)
83 if value:
84 output += ": %s" % unicode(value)
85
86 print >> sys.stderr, output
8782
88SHAPELY = False83SHAPELY = False
89try:84try:
@@ -240,7 +235,6 @@
240 """235 """
241236
242 def __init__(self):237 def __init__(self):
243
244 self.deployment_settings = current.deployment_settings238 self.deployment_settings = current.deployment_settings
245 self.public_url = current.deployment_settings.get_base_public_url()239 self.public_url = current.deployment_settings.get_base_public_url()
246 if not current.db is not None:240 if not current.db is not None:
@@ -308,6 +302,18 @@
308 else:302 else:
309 return wkt303 return wkt
310304
305 def debug(self, message, value=None):
306 # should be using python's built-in logging module
307 session = current.session
308 if session.s3.debug:
309 raise Exception(message)
310 else:
311 output = u"S3 Debug: %s" % unicode(message)
312 if value:
313 output += u": %s" % unicode(value)
314 sys.stderr.write(output+"\n")
315 session.error = current.T(message)
316
311 # -------------------------------------------------------------------------317 # -------------------------------------------------------------------------
312 def download_kml(self, record_id, filename):318 def download_kml(self, record_id, filename):
313 """319 """
@@ -326,11 +332,11 @@
326 db = current.db332 db = current.db
327333
328 layer = KMLLayer(self)334 layer = KMLLayer(self)
335
329 query = (layer.table.id == record_id)336 query = (layer.table.id == record_id)
330 record = db(query).select(limitby=(0, 1)).first()337 record = db(query).select(limitby=(0, 1)).first()
331 url = record.url338 url = record.url
332339
333 layer.add_record(record)
334 cachepath = layer.cachepath340 cachepath = layer.cachepath
335 filepath = os.path.join(cachepath, filename)341 filepath = os.path.join(cachepath, filename)
336342
@@ -427,7 +433,7 @@
427 myfile = zipfile.ZipFile(fp)433 myfile = zipfile.ZipFile(fp)
428 try:434 try:
429 file = myfile.read("doc.kml")435 file = myfile.read("doc.kml")
430 except:436 except: # Naked except!!
431 file = myfile.read(myfile.infolist()[0].filename)437 file = myfile.read(myfile.infolist()[0].filename)
432 myfile.close()438 myfile.close()
433439
@@ -531,7 +537,7 @@
531 try:537 try:
532 lon = features[0].lon538 lon = features[0].lon
533 simple = True539 simple = True
534 except:540 except AttributeError:
535 simple = False541 simple = False
536542
537 for feature in features:543 for feature in features:
@@ -544,7 +550,7 @@
544 # A Join550 # A Join
545 lon = feature.gis_location.lon551 lon = feature.gis_location.lon
546 lat = feature.gis_location.lat552 lat = feature.gis_location.lat
547 except:553 except AttributeError:
548 # Skip any rows without the necessary lat/lon fields554 # Skip any rows without the necessary lat/lon fields
549 continue555 continue
550556
@@ -985,7 +991,8 @@
985 _marker = db.gis_marker991 _marker = db.gis_marker
986 _projection = db.gis_projection992 _projection = db.gis_projection
987 have_tables = _config and _projection993 have_tables = _config and _projection
988 except:994 except Exception, exception:
995 self.debug(exception)
989 have_tables = False996 have_tables = False
990997
991 row = None998 row = None
@@ -1002,16 +1009,13 @@
1002 if not row:1009 if not row:
1003 if auth.is_logged_in():1010 if auth.is_logged_in():
1004 # Read personalised config, if available.1011 # Read personalised config, if available.
1005 try:1012 query = (db.pr_person.uuid == auth.user.person_uuid) & \
1006 query = (db.pr_person.uuid == auth.user.person_uuid) & \1013 (_config.pe_id == db.pr_person.pe_id) & \
1007 (_config.pe_id == db.pr_person.pe_id) & \1014 (_marker.id == _config.marker_id) & \
1008 (_marker.id == _config.marker_id) & \1015 (_projection.id == _config.projection_id)
1009 (_projection.id == _config.projection_id)1016 row = db(query).select(limitby=(0, 1)).first()
1010 row = db(query).select(limitby=(0, 1)).first()1017 if row:
1011 if row:1018 config_id = row["gis_config"].id
1012 config_id = row["gis_config"].id
1013 except:
1014 pass
1015 if not row:1019 if not row:
1016 # No personal config or not logged in. Use site default.1020 # No personal config or not logged in. Use site default.
1017 config_id = 11021 config_id = 1
@@ -1148,7 +1152,7 @@
1148 if level:1152 if level:
1149 try:1153 try:
1150 return location_hierarchy[level]1154 return location_hierarchy[level]
1151 except:1155 except KeyError:
1152 return level1156 return level
1153 else:1157 else:
1154 return location_hierarchy1158 return location_hierarchy
@@ -1197,7 +1201,8 @@
1197 if level:1201 if level:
1198 try:1202 try:
1199 return all_levels[level]1203 return all_levels[level]
1200 except:1204 except Exception, exception:
1205
1201 return level1206 return level
1202 else:1207 else:
1203 return all_levels1208 return all_levels
@@ -1472,7 +1477,7 @@
1472 represent = db(table.id == value).select(table.name,1477 represent = db(table.id == value).select(table.name,
1473 cache=cache,1478 cache=cache,
1474 limitby=(0, 1)).first().name1479 limitby=(0, 1)).first().name
1475 except:1480 except: # @ToDo: provide specific exception
1476 # Keep the default from earlier1481 # Keep the default from earlier
1477 pass1482 pass
14781483
@@ -1512,24 +1517,21 @@
1512 lat_max = location.lat_max1517 lat_max = location.lat_max
15131518
1514 else:1519 else:
1515 s3_debug("Location searched within isn't a Polygon!")1520 self.debug("Location searched within isn't a Polygon!")
1516 session.error = T("Location searched within isn't a Polygon!")
1517 return None1521 return None
1518 except:1522 except: # @ToDo: need specific exception
1519 wkt = location1523 wkt = location
1520 if (wkt.startswith("POLYGON") or wkt.startswith("MULTIPOLYGON")):1524 if (wkt.startswith("POLYGON") or wkt.startswith("MULTIPOLYGON")):
1521 # ok1525 # ok
1522 lon_min = None1526 lon_min = None
1523 else:1527 else:
1524 s3_debug("This isn't a Polygon!")1528 self.debug("This isn't a Polygon!")
1525 session.error = T("This isn't a Polygon!")
1526 return None1529 return None
15271530
1528 try:1531 try:
1529 polygon = wkt_loads(wkt)1532 polygon = wkt_loads(wkt)
1530 except:1533 except: # @ToDo: need specific exception
1531 s3_debug("Invalid Polygon!")1534 self.debug("Invalid Polygon!")
1532 session.error = T("Invalid Polygon!")
1533 return None1535 return None
15341536
1535 table = db[tablename]1537 table = db[tablename]
@@ -1537,8 +1539,7 @@
15371539
1538 if "location_id" not in table.fields():1540 if "location_id" not in table.fields():
1539 # @ToDo: Add any special cases to be able to find the linked location1541 # @ToDo: Add any special cases to be able to find the linked location
1540 s3_debug("This table doesn't have a location_id!")1542 self.debug("This table doesn't have a location_id!")
1541 session.error = T("This table doesn't have a location_id!")
1542 return None1543 return None
15431544
1544 query = (table.location_id == locations.id)1545 query = (table.location_id == locations.id)
@@ -1570,7 +1571,10 @@
1570 # Save Record1571 # Save Record
1571 output.records.append(row)1572 output.records.append(row)
1572 except shapely.geos.ReadingError:1573 except shapely.geos.ReadingError:
1573 s3_debug("Error reading wkt of location with id", row.id)1574 self.debug(
1575 "Error reading wkt of location with id",
1576 value=row.id
1577 )
1574 else:1578 else:
1575 # 1st check for Features included within the bbox (faster)1579 # 1st check for Features included within the bbox (faster)
1576 def in_bbox(row):1580 def in_bbox(row):
@@ -1596,7 +1600,10 @@
1596 # Save Record1600 # Save Record
1597 output.records.append(row)1601 output.records.append(row)
1598 except shapely.geos.ReadingError:1602 except shapely.geos.ReadingError:
1599 s3_debug("Error reading wkt of location with id", row.id)1603 self.debug(
1604 "Error reading wkt of location with id",
1605 value = row.id,
1606 )
16001607
1601 return output1608 return output
16021609
@@ -2072,38 +2079,41 @@
2072 current_row += 12079 current_row += 1
2073 try:2080 try:
2074 name0 = row.pop("ADM0_NAME")2081 name0 = row.pop("ADM0_NAME")
2075 except:2082 except KeyError:
2076 name0 = ""2083 name0 = ""
2077 try:2084 try:
2078 name1 = row.pop("ADM1_NAME")2085 name1 = row.pop("ADM1_NAME")
2079 except:2086 except KeyError:
2080 name1 = ""2087 name1 = ""
2081 try:2088 try:
2082 name2 = row.pop("ADM2_NAME")2089 name2 = row.pop("ADM2_NAME")
2083 except:2090 except KeyError:
2084 name2 = ""2091 name2 = ""
2085 try:2092 try:
2086 name3 = row.pop("ADM3_NAME")2093 name3 = row.pop("ADM3_NAME")
2087 except:2094 except KeyError:
2088 name3 = ""2095 name3 = ""
2089 try:2096 try:
2090 name4 = row.pop("ADM4_NAME")2097 name4 = row.pop("ADM4_NAME")
2091 except:2098 except KeyError:
2092 name4 = ""2099 name4 = ""
2093 try:2100 try:
2094 name5 = row.pop("ADM5_NAME")2101 name5 = row.pop("ADM5_NAME")
2095 except:2102 except KeyError:
2096 name5 = ""2103 name5 = ""
20972104
2098 if not name5 and not name4 and not name3 and \2105 if not name5 and not name4 and not name3 and \
2099 not name2 and not name1:2106 not name2 and not name1:
2100 # We need a name! (L0's are already in DB)2107 # We need a name! (L0's are already in DB)
2101 s3_debug("No name provided", current_row)2108 s3_debug(
2109 "No name provided",
2110 current_row,
2111 )
2102 continue2112 continue
21032113
2104 try:2114 try:
2105 wkt = row.pop("WKT")2115 wkt = row.pop("WKT")
2106 except:2116 except KeyError:
2107 wkt = None2117 wkt = None
2108 try:2118 try:
2109 lat = row.pop("LAT")2119 lat = row.pop("LAT")
@@ -2112,21 +2122,17 @@
2112 lat = None2122 lat = None
2113 lon = None2123 lon = None
21142124
2115 if domain:2125 try:
2116 try:2126 uuid = row.pop("UUID")
2117 uuid = "%s/%s" % (domain,2127 except KeyError:
2118 row.pop("UUID"))2128 uuid = ""
2119 except:
2120 uuid = ""
2121 else:2129 else:
2122 try:2130 if domain:
2123 uuid = row.pop("UUID")2131 uuid = "%s/%s" % (domain, uuid)
2124 except:
2125 uuid = ""
21262132
2127 try:2133 try:
2128 code = row.pop("CODE")2134 code = row.pop("CODE")
2129 except:2135 except KeyError:
2130 code = ""2136 code = ""
21312137
2132 population = ""2138 population = ""
@@ -2172,7 +2178,7 @@
2172 # Calculate Centroid & Bounds2178 # Calculate Centroid & Bounds
2173 if wkt:2179 if wkt:
2174 try:2180 try:
2175 # Valid WKT2181 # Valid WKT
2176 shape = wkt_loads(wkt)2182 shape = wkt_loads(wkt)
2177 centroid_point = shape.centroid2183 centroid_point = shape.centroid
2178 lon = centroid_point.x2184 lon = centroid_point.x
@@ -2186,7 +2192,7 @@
2186 feature_type = 1 # Point2192 feature_type = 1 # Point
2187 else:2193 else:
2188 feature_type = 3 # Polygon2194 feature_type = 3 # Polygon
2189 except:2195 except: # @ToDo: provide specific exception
2190 s3_debug("Invalid WKT", name)2196 s3_debug("Invalid WKT", name)
2191 continue2197 continue
2192 else:2198 else:
@@ -2326,7 +2332,7 @@
2326 else:2332 else:
2327 cached = False2333 cached = False
2328 if not os.access(cachepath, os.W_OK):2334 if not os.access(cachepath, os.W_OK):
2329 s3_debug("Folder not writable", cachepath)2335 self.debug("Folder not writable", cachepath)
2330 return2336 return
23312337
2332 if not cached:2338 if not cached:
@@ -2335,11 +2341,11 @@
2335 f = fetch(url)2341 f = fetch(url)
2336 except (urllib2.URLError,):2342 except (urllib2.URLError,):
2337 e = sys.exc_info()[1]2343 e = sys.exc_info()[1]
2338 s3_debug("URL Error", e)2344 self.debug("URL Error", e)
2339 return2345 return
2340 except (urllib2.HTTPError,):2346 except (urllib2.HTTPError,):
2341 e = sys.exc_info()[1]2347 e = sys.exc_info()[1]
2342 s3_debug("HTTP Error", e)2348 self.debug("HTTP Error", e)
2343 return2349 return
23442350
2345 # Unzip File2351 # Unzip File
@@ -2352,8 +2358,8 @@
2352 # For now, 2.5 users need to download/unzip manually to cache folder2358 # For now, 2.5 users need to download/unzip manually to cache folder
2353 myfile.extract(filename, cachepath)2359 myfile.extract(filename, cachepath)
2354 myfile.close()2360 myfile.close()
2355 except:2361 except IOError:
2356 s3_debug("Zipfile contents don't seem correct!")2362 self.debug("Zipfile contents don't seem correct!")
2357 myfile.close()2363 myfile.close()
2358 return2364 return
23592365
@@ -2469,7 +2475,7 @@
2469 # Should be just a single parent2475 # Should be just a single parent
2470 break2476 break
2471 except shapely.geos.ReadingError:2477 except shapely.geos.ReadingError:
2472 s3_debug("Error reading wkt of location with id", row.id)2478 self.debug("Error reading wkt of location with id", row.id)
24732479
2474 # Add entry to database2480 # Add entry to database
2475 table.insert(uuid=uuid,2481 table.insert(uuid=uuid,
@@ -2489,7 +2495,7 @@
2489 else:2495 else:
2490 continue2496 continue
24912497
2492 s3_debug("All done!")2498 self.debug("All done!")
2493 return2499 return
24942500
2495 # -------------------------------------------------------------------------2501 # -------------------------------------------------------------------------
@@ -2732,7 +2738,7 @@
27322738
2733 db = current.db2739 db = current.db
2734 in_bbox = self.query_features_by_bbox(*shape.bounds)2740 in_bbox = self.query_features_by_bbox(*shape.bounds)
2735 has_wkt = (db.gis_location.wkt != None) & (db.gis_location.wkt != '')2741 has_wkt = (db.gis_location.wkt != None) & (db.gis_location.wkt != "")
27362742
2737 for loc in db(in_bbox & has_wkt).select():2743 for loc in db(in_bbox & has_wkt).select():
2738 try:2744 try:
@@ -2740,7 +2746,7 @@
2740 if location_shape.intersects(shape):2746 if location_shape.intersects(shape):
2741 yield loc2747 yield loc
2742 except shapely.geos.ReadingError:2748 except shapely.geos.ReadingError:
2743 s3_debug("Error reading wkt of location with id", loc.id)2749 self.debug("Error reading wkt of location with id", loc.id)
27442750
2745 # -------------------------------------------------------------------------2751 # -------------------------------------------------------------------------
2746 def _get_features_by_latlon(self, lat, lon):2752 def _get_features_by_latlon(self, lat, lon):
@@ -2796,7 +2802,7 @@
2796 try :2802 try :
2797 shape = wkt_loads(location.wkt)2803 shape = wkt_loads(location.wkt)
2798 except:2804 except:
2799 s3_debug("Error reading WKT", location.wkt)2805 self.debug("Error reading WKT", location.wkt)
2800 continue2806 continue
2801 bounds = shape.bounds2807 bounds = shape.bounds
2802 table[location.id] = dict(2808 table[location.id] = dict(
@@ -2947,7 +2953,12 @@
2947 map_width = width2953 map_width = width
2948 else:2954 else:
2949 map_width = config.map_width2955 map_width = config.map_width
2950 if bbox and (-90 < bbox["max_lat"] < 90) and (-90 < bbox["min_lat"] < 90) and (-180 < bbox["max_lon"] < 180) and (-180 < bbox["min_lon"] < 180):2956 if (bbox
2957 and (-90 < bbox["max_lat"] < 90)
2958 and (-90 < bbox["min_lat"] < 90)
2959 and (-180 < bbox["max_lon"] < 180)
2960 and (-180 < bbox["min_lon"] < 180)
2961 ):
2951 # We have sane Bounds provided, so we should use them2962 # We have sane Bounds provided, so we should use them
2952 pass2963 pass
2953 else:2964 else:
@@ -2971,15 +2982,21 @@
2971 projection = config.epsg2982 projection = config.epsg
29722983
29732984
2974 if projection != 900913 and projection != 4326:2985 if projection not in (900913, 4326):
2975 # Test for Valid Projection file in Proj4JS library2986 # Test for Valid Projection file in Proj4JS library
2976 projpath = os.path.join(request.folder, "static", "scripts", "gis", "proj4js", "lib", "defs", "EPSG%s.js" % projection)2987 projpath = os.path.join(
2988 request.folder, "static", "scripts", "gis", "proj4js", \
2989 "lib", "defs", "EPSG%s.js" % projection
2990 )
2977 try:2991 try:
2978 f = open(projpath, "r")2992 f = open(projpath, "r")
2979 f.close()2993 f.close()
2980 except:2994 except:
2981 session.error = "%s /static/scripts/gis/proj4js/lib/defs" % T("Projection not supported - please add definition to")2995 session.error = "'%s' %s /static/scripts/gis/proj4js/lib/defs" % (
2982 redirect(URL(c="gis", f="projection"))2996 projection,
2997 T("Projection not supported - please add definition to")
2998 )
2999 redirect(URL(r=request, c="gis", f="projection"))
29833000
2984 units = config.units3001 units = config.units
2985 maxResolution = config.maxResolution3002 maxResolution = config.maxResolution
@@ -3052,11 +3069,12 @@
3052 #########3069 #########
3053 # Scripts3070 # Scripts
3054 #########3071 #########
3072
3055 def add_javascript(script):3073 def add_javascript(script):
3056 if type(script) == SCRIPT:3074 if type(script) == SCRIPT:
3057 html.append(script)3075 html.append(script)
3058 elif script.startswith("http"):3076 elif script.startswith("http"):
3059 html.append(3077 html.append(
3060 SCRIPT(_type="text/javascript",3078 SCRIPT(_type="text/javascript",
3061 _src=script))3079 _src=script))
3062 else:3080 else:
@@ -3066,7 +3084,7 @@
30663084
3067 debug = session.s3.debug3085 debug = session.s3.debug
3068 if debug:3086 if debug:
3069 if projection != 900913 and projection != 4326:3087 if projection not in (900913, 4326):
3070 add_javascript("scripts/gis/proj4js/lib/proj4js-combined.js")3088 add_javascript("scripts/gis/proj4js/lib/proj4js-combined.js")
3071 add_javascript("scripts/gis/proj4js/lib/defs/EPSG%s.js" % projection)3089 add_javascript("scripts/gis/proj4js/lib/defs/EPSG%s.js" % projection)
30723090
@@ -3079,7 +3097,7 @@
3079 add_javascript("scripts/gis/usng2.js")3097 add_javascript("scripts/gis/usng2.js")
3080 add_javascript("scripts/gis/MP.js")3098 add_javascript("scripts/gis/MP.js")
3081 else:3099 else:
3082 if projection != 900913 and projection != 4326:3100 if projection not in (900913, 4326):
3083 add_javascript("scripts/gis/proj4js/lib/proj4js-compressed.js")3101 add_javascript("scripts/gis/proj4js/lib/proj4js-compressed.js")
3084 add_javascript("scripts/gis/proj4js/lib/defs/EPSG%s.js" % projection)3102 add_javascript("scripts/gis/proj4js/lib/defs/EPSG%s.js" % projection)
3085 add_javascript("scripts/gis/OpenLayers.js")3103 add_javascript("scripts/gis/OpenLayers.js")
@@ -3181,20 +3199,20 @@
3181 # If we do come back to it, then it should be moved to static3199 # If we do come back to it, then it should be moved to static
3182 if print_tool:3200 if print_tool:
3183 url = print_tool["url"]3201 url = print_tool["url"]
3184 url+'' # check url can be concatenated with strings3202 url+"" # check url can be concatenated with strings
3185 if "title" in print_tool:3203 if "title" in print_tool:
3186 mapTitle = str(print_tool["mapTitle"])3204 mapTitle = unicode(print_tool["mapTitle"])
3187 else:3205 else:
3188 mapTitle = str(T("Map from Sahana Eden"))3206 mapTitle = unicode(T("Map from Sahana Eden"))
3189 if "subtitle" in print_tool:3207 if "subtitle" in print_tool:
3190 subTitle = str(print_tool["subTitle"])3208 subTitle = unicode(print_tool["subTitle"])
3191 else:3209 else:
3192 subTitle = str(T("Printed from Sahana Eden"))3210 subTitle = unicode(T("Printed from Sahana Eden"))
3193 if session.auth:3211 if session.auth:
3194 creator = session.auth.user.email3212 creator = unicode(session.auth.user.email)
3195 else:3213 else:
3196 creator = ""3214 creator = ""
3197 print_tool1 = "".join(("""3215 print_tool1 = u"".join(("""
3198 if (typeof(printCapabilities) != 'undefined') {3216 if (typeof(printCapabilities) != 'undefined') {
3199 // info.json from script headers OK3217 // info.json from script headers OK
3200 printProvider = new GeoExt.data.PrintProvider({3218 printProvider = new GeoExt.data.PrintProvider({
@@ -3218,7 +3236,7 @@
3218 // printProvider: printProvider3236 // printProvider: printProvider
3219 //});3237 //});
3220 // A layer to display the print page extent3238 // A layer to display the print page extent
3221 //var pageLayer = new OpenLayers.Layer.Vector('""", str(T("Print Extent")), """');3239 //var pageLayer = new OpenLayers.Layer.Vector('""", unicode(T("Print Extent")), """');
3222 //pageLayer.addFeatures(printPage.feature);3240 //pageLayer.addFeatures(printPage.feature);
3223 //pageLayer.setVisibility(false);3241 //pageLayer.setVisibility(false);
3224 //map.addLayer(pageLayer);3242 //map.addLayer(pageLayer);
@@ -3234,7 +3252,7 @@
3234 //});3252 //});
3235 // The form with fields controlling the print output3253 // The form with fields controlling the print output
3236 S3.gis.printFormPanel = new Ext.form.FormPanel({3254 S3.gis.printFormPanel = new Ext.form.FormPanel({
3237 title: '""", str(T("Print Map")), """',3255 title: '""", unicode(T("Print Map")), """',
3238 rootVisible: false,3256 rootVisible: false,
3239 split: true,3257 split: true,
3240 autoScroll: true,3258 autoScroll: true,
@@ -3247,7 +3265,7 @@
3247 defaults: {anchor: '100%%'},3265 defaults: {anchor: '100%%'},
3248 listeners: {3266 listeners: {
3249 'expand': function() {3267 'expand': function() {
3250 //if (null == mapPanel.map.getLayersByName('""", str(T("Print Extent")), """')[0]) {3268 //if (null == mapPanel.map.getLayersByName('""", unicode(T("Print Extent")), """')[0]) {
3251 // mapPanel.map.addLayer(pageLayer);3269 // mapPanel.map.addLayer(pageLayer);
3252 //}3270 //}
3253 if (null == mapPanel.plugins[0]) {3271 if (null == mapPanel.plugins[0]) {
@@ -3275,7 +3293,7 @@
3275 xtype: 'textarea',3293 xtype: 'textarea',
3276 name: 'comment',3294 name: 'comment',
3277 value: '',3295 value: '',
3278 fieldLabel: '""", str(T("Comment")), """',3296 fieldLabel: '""", unicode(T("Comment")), """',
3279 plugins: new GeoExt.plugins.PrintPageField({3297 plugins: new GeoExt.plugins.PrintPageField({
3280 printPage: printPage3298 printPage: printPage
3281 })3299 })
@@ -3283,7 +3301,7 @@
3283 xtype: 'combo',3301 xtype: 'combo',
3284 store: printProvider.layouts,3302 store: printProvider.layouts,
3285 displayField: 'name',3303 displayField: 'name',
3286 fieldLabel: '""", str(T("Layout")), """',3304 fieldLabel: '""", T("Layout").decode("utf-8"), """',
3287 typeAhead: true,3305 typeAhead: true,
3288 mode: 'local',3306 mode: 'local',
3289 triggerAction: 'all',3307 triggerAction: 'all',
@@ -3294,7 +3312,7 @@
3294 xtype: 'combo',3312 xtype: 'combo',
3295 store: printProvider.dpis,3313 store: printProvider.dpis,
3296 displayField: 'name',3314 displayField: 'name',
3297 fieldLabel: '""", str(T("Resolution")), """',3315 fieldLabel: '""", unicode(T("Resolution")), """',
3298 tpl: '<tpl for="."><div class="x-combo-list-item">{name} dpi</div></tpl>',3316 tpl: '<tpl for="."><div class="x-combo-list-item">{name} dpi</div></tpl>',
3299 typeAhead: true,3317 typeAhead: true,
3300 mode: 'local',3318 mode: 'local',
@@ -3311,7 +3329,7 @@
3311 // xtype: 'combo',3329 // xtype: 'combo',
3312 // store: printProvider.scales,3330 // store: printProvider.scales,
3313 // displayField: 'name',3331 // displayField: 'name',
3314 // fieldLabel: '""", str(T("Scale")), """',3332 // fieldLabel: '""", unicode(T("Scale")), """',
3315 // typeAhead: true,3333 // typeAhead: true,
3316 // mode: 'local',3334 // mode: 'local',
3317 // triggerAction: 'all',3335 // triggerAction: 'all',
@@ -3321,13 +3339,13 @@
3321 //}, {3339 //}, {
3322 // xtype: 'textfield',3340 // xtype: 'textfield',
3323 // name: 'rotation',3341 // name: 'rotation',
3324 // fieldLabel: '""", str(T("Rotation")), """',3342 // fieldLabel: '""", unicode(T("Rotation")), """',
3325 // plugins: new GeoExt.plugins.PrintPageField({3343 // plugins: new GeoExt.plugins.PrintPageField({
3326 // printPage: printPage3344 // printPage: printPage
3327 // })3345 // })
3328 }],3346 }],
3329 buttons: [{3347 buttons: [{
3330 text: '""", str(T("Create PDF")), """',3348 text: '""", unicode(T("Create PDF")), """',
3331 handler: function() {3349 handler: function() {
3332 // the PrintExtent plugin is the mapPanel's 1st plugin3350 // the PrintExtent plugin is the mapPanel's 1st plugin
3333 //mapPanel.plugins[0].print();3351 //mapPanel.plugins[0].print();
@@ -3345,7 +3363,7 @@
3345 } else {3363 } else {
3346 // Display error diagnostic3364 // Display error diagnostic
3347 S3.gis.printFormPanel = new Ext.Panel ({3365 S3.gis.printFormPanel = new Ext.Panel ({
3348 title: '""", str(T("Print Map")), """',3366 title: '""", unicode(T("Print Map")), """',
3349 rootVisible: false,3367 rootVisible: false,
3350 split: true,3368 split: true,
3351 autoScroll: true,3369 autoScroll: true,
@@ -3356,7 +3374,7 @@
3356 bodyStyle: 'padding:5px',3374 bodyStyle: 'padding:5px',
3357 labelAlign: 'top',3375 labelAlign: 'top',
3358 defaults: {anchor: '100%'},3376 defaults: {anchor: '100%'},
3359 html: '""", str(T("Printing disabled since server not accessible")), """: <BR />""", url, """'3377 html: '""", unicode(T("Printing disabled since server not accessible")), """: <BR />""", unicode(url), """'
3360 });3378 });
3361 }3379 }
3362 """))3380 """))
@@ -3442,40 +3460,40 @@
3442 name_safe = re.sub("'", "", layer.name)3460 name_safe = re.sub("'", "", layer.name)
3443 if layer.url2:3461 if layer.url2:
3444 url2 = """,3462 url2 = """,
3445 url2: '%s'""" % layer.url23463 "url2": "%s\"""" % layer.url2
3446 else:3464 else:
3447 url2 = ""3465 url2 = ""
3448 if layer.url3:3466 if layer.url3:
3449 url3 = """,3467 url3 = """,
3450 url3: '%s'""" % layer.url33468 "url3": "%s\"""" % layer.url3
3451 else:3469 else:
3452 url3 = ""3470 url3 = ""
3453 if layer.base:3471 if layer.base:
3454 base = ""3472 base = ""
3455 else:3473 else:
3456 base = """,3474 base = """,
3457 isBaseLayer: false"""3475 "isBaseLayer": false"""
3458 if layer.visible:3476 if layer.visible:
3459 visibility = ""3477 visibility = ""
3460 else:3478 else:
3461 visibility = """,3479 visibility = """,
3462 visibility: false"""3480 "visibility": false"""
3463 if layer.attribution:3481 if layer.attribution:
3464 attribution = """,3482 attribution = """,
3465 attribution: '%s'""" % layer.attribution3483 "attribution": %s""" % repr(layer.attribution)
3466 else:3484 else:
3467 attribution = ""3485 attribution = ""
3468 if layer.zoom_levels is not None and layer.zoom_levels != 19:3486 if layer.zoom_levels is not None and layer.zoom_levels != 19:
3469 zoomLevels = """,3487 zoomLevels = """,
3470 zoomLevels: %i""" % layer.zoom_levels3488 "zoomLevels": %i""" % layer.zoom_levels
3471 else:3489 else:
3472 zoomLevels = ""3490 zoomLevels = ""
34733491
3474 # Generate JS snippet to pass to static3492 # Generate JS snippet to pass to static
3475 layers_osm += """3493 layers_osm += """
3476S3.gis.layers_osm[%i] = {3494S3.gis.layers_osm[%i] = {
3477 name: '%s',3495 "name": "%s",
3478 url1: '%s'%s%s%s%s%s%s3496 "url1": "%s"%s%s%s%s%s%s
3479}3497}
3480""" % (counter,3498""" % (counter,
3481 name_safe,3499 name_safe,
@@ -3487,6 +3505,7 @@
3487 attribution,3505 attribution,
3488 zoomLevels)3506 zoomLevels)
34893507
3508
3490 # ---------------------------------------------------------------------3509 # ---------------------------------------------------------------------
3491 # XYZ3510 # XYZ
3492 # @ToDo: Migrate to Class/Static3511 # @ToDo: Migrate to Class/Static
@@ -3678,7 +3697,7 @@
36783697
3679 if "active" in layer and not layer["active"]:3698 if "active" in layer and not layer["active"]:
3680 visibility = """,3699 visibility = """,
3681 visibility: false"""3700 "visibility": false"""
3682 else:3701 else:
3683 visibility = ""3702 visibility = ""
36843703
@@ -3709,32 +3728,33 @@
3709 marker_url = ""3728 marker_url = ""
3710 if marker_url:3729 if marker_url:
3711 markerLayer = """,3730 markerLayer = """,
3712 marker_url: '%s',3731 "marker_url": "%s",
3713 marker_height: %i,3732 "marker_height": %i,
3714 marker_width: %i""" % (marker_url, marker_height, marker_width)3733 "marker_width": %i""" % (marker_url, marker_height, marker_width)
37153734
3716 if "opacity" in layer and layer["opacity"] != 1:3735 if "opacity" in layer and layer["opacity"] != 1:
3717 opacity = """,3736 opacity = """,
3718 opacity: %.1f""" % layer["opacity"]3737 "opacity": %.1f""" % layer["opacity"]
3719 else:3738 else:
3720 opacity = ""3739 opacity = ""
3721 if "cluster_distance" in layer and layer["cluster_distance"] != self.cluster_distance:3740 if "cluster_distance" in layer and layer["cluster_distance"] != self.cluster_distance:
3722 cluster_distance = """,3741 cluster_distance = """,
3723 cluster_distance: %i""" % layer["cluster_distance"]3742 "cluster_distance": %i""" % layer["cluster_distance"]
3724 else:3743 else:
3725 cluster_distance = ""3744 cluster_distance = ""
3726 if "cluster_threshold" in layer and layer["cluster_threshold"] != self.cluster_threshold:3745 if "cluster_threshold" in layer and layer["cluster_threshold"] != self.cluster_threshold:
3727 cluster_threshold = """,3746 cluster_threshold = """,
3728 cluster_threshold: %i""" % layer["cluster_threshold"]3747 "cluster_threshold": %i""" % layer["cluster_threshold"]
3729 else:3748 else:
3730 cluster_threshold = ""3749 cluster_threshold = ""
37313750
3732 # Generate JS snippet to pass to static3751 # Generate JS snippet to pass to static
3733 layers_feature_queries += """3752 layers_feature_queries += """
3734S3.gis.layers_feature_queries[%i] = {3753S3.gis.layers_feature_queries[%i] = {
3735 name: '%s',3754 "name": "%s",
3736 url: '%s'%s%s%s%s%s3755 "url": "%s"%s%s%s%s%s
3737}""" % (counter,3756}
3757""" % (counter,
3738 name,3758 name,
3739 url,3759 url,
3740 visibility,3760 visibility,
@@ -3742,36 +3762,44 @@
3742 opacity,3762 opacity,
3743 cluster_distance,3763 cluster_distance,
3744 cluster_threshold)3764 cluster_threshold)
37453765
3746 # ---------------------------------------------------------------------3766 # ---------------------------------------------------------------------
3747 # Add Layers from the Catalogue3767 # Add Layers from the Catalogue
3748 # ---------------------------------------------------------------------3768 # ---------------------------------------------------------------------
3749 layers_config = ""3769 layers_config = ""
3750 if catalogue_layers:3770 if catalogue_layers:
3751 for LayerType in [3771 for LayerType in [
3752 #OSMLayer,3772 #OSMLayer,
3753 Bing,3773 BingLayer,
3754 Google,3774 GoogleLayer,
3755 Yahoo,3775 YahooLayer,
3756 TMSLayer,3776 TMSLayer,
3757 WMSLayer,3777 WMSLayer,
3758 FeatureLayer,3778 FeatureLayer,
3759 GeoJSONLayer,3779 GeoJSONLayer,
3760 GeoRSSLayer,3780 GeoRSSLayer,
3761 GPXLayer,3781 GPXLayer,
3762 KMLLayer,3782 KMLLayer,
3763 WFSLayer3783 WFSLayer
3764 ]:3784 ]:
3765 # Instantiate the Class3785 try:
3766 layer = LayerType(self)3786 # Instantiate the Class
3767 layer_type_js = layer.as_javascript()3787 layer = LayerType(self)
3768 if layer_type_js:3788 layer_type_js = layer.as_javascript()
3769 # Add to the output JS3789 if layer_type_js:
3770 layers_config = "".join((layers_config,3790 # Add to the output JS
3771 layer_type_js))3791 layers_config = "".join((layers_config,
3772 if layer.scripts:3792 layer_type_js))
3773 for script in layer.scripts:3793 if layer.scripts:
3774 add_javascript(script)3794 for script in layer.scripts:
3795 add_javascript(script)
3796 except Exception, exception:
3797 if debug:
3798 raise
3799 else:
3800 session.warning.append(
3801 LayerType.__name__ + " not shown due to error"
3802 )
37753803
3776 # -----------------------------------------------------------------3804 # -----------------------------------------------------------------
3777 # Coordinate Grid - only one possible3805 # Coordinate Grid - only one possible
@@ -3845,6 +3873,7 @@
3845 "S3.gis.marker_default_width = %i;\n" % marker_default.width,3873 "S3.gis.marker_default_width = %i;\n" % marker_default.width,
3846 osm_auth,3874 osm_auth,
3847 layers_osm,3875 layers_osm,
3876 layers_feature_queries,
3848 _features,3877 _features,
3849 layers_config,3878 layers_config,
3850 # i18n Labels3879 # i18n Labels
@@ -3877,6 +3906,7 @@
3877 ))))3906 ))))
38783907
3879 # Static Script3908 # Static Script
3909
3880 if debug:3910 if debug:
3881 add_javascript("scripts/S3/s3.gis.js")3911 add_javascript("scripts/S3/s3.gis.js")
3882 add_javascript("scripts/S3/s3.gis.layers.js")3912 add_javascript("scripts/S3/s3.gis.layers.js")
@@ -3886,7 +3916,6 @@
38863916
3887 # Dynamic Script (stuff which should, as far as possible, be moved to static)3917 # Dynamic Script (stuff which should, as far as possible, be moved to static)
3888 html.append(SCRIPT(layers_js + \3918 html.append(SCRIPT(layers_js + \
3889 #layers_xyz + \
3890 print_tool1))3919 print_tool1))
38913920
3892 # Set up map plugins3921 # Set up map plugins
@@ -3901,8 +3930,7 @@
39013930
3902 return html3931 return html
39033932
39043933# -----------------------------------------------------------------------------
3905# =============================================================================
3906class Marker(object):3934class Marker(object):
3907 """ Represents a Map Marker """3935 """ Represents a Map Marker """
3908 def __init__(self, gis, id=None):3936 def __init__(self, gis, id=None):
@@ -3935,6 +3963,13 @@
3935 #self.url = URL(c="static", f="img",3963 #self.url = URL(c="static", f="img",
3936 # args=["markers", marker.image])3964 # args=["markers", marker.image])
39373965
3966 def add_attributes_to_output(self, output):
3967 output.update(
3968 marker_image = self.image,
3969 marker_height = self.height,
3970 marker_width = self.width,
3971 )
3972
3938# -----------------------------------------------------------------------------3973# -----------------------------------------------------------------------------
3939class Projection(object):3974class Projection(object):
3940 """ Represents a Map Projection """3975 """ Represents a Map Projection """
@@ -3960,41 +3995,34 @@
3960 self.epsg = projection.epsg3995 self.epsg = projection.epsg
39613996
3962# -----------------------------------------------------------------------------3997# -----------------------------------------------------------------------------
3998
3999def config_dict(mandatory, defaulted):
4000 d = dict(mandatory)
4001 for key, (value, defaults) in defaulted.iteritems():
4002 if value not in defaults:
4003 d[key] = value
4004 return d
4005
4006
4007# the layer code only needs to do:
4008# any database lookups to get extra data
4009# security checks.
4010
4011# then it generates appropriate JSON strings.
4012
3963class Layer(object):4013class Layer(object):
3964 """4014 """
3965 Base Class for Layers4015 Abstract Base Class for Layers
3966 Not meant to be instantiated direct
3967 """4016 """
3968 def __init__(self, gis, record=None):4017 def __init__(self, gis):
4018 self.gis = gis
3969 db = current.db4019 db = current.db
39704020 try:
3971 self.gis = gis4021 self.table = db[self.table_name]
3972 # This usually arrives later4022 except:
3973 self.record = record4023 current.manager.load(self.table_name)
3974 # Ensure all attributes available (even if Null)4024 self.table = db[tablename]
3975 self._refresh()4025
3976 self.scripts = []
3977
3978 def add_record(self, record):
3979 """
3980 Update the record & refresh the attributes
3981 """
3982 if record:
3983 self.record = record
3984 self._refresh()
3985 else:
3986 return
3987
3988 def as_dict(self):
3989 """
3990 Output the Layer as a Python dictionary
3991 - this is used to build a JSON of the overall dict of layers
3992 """
3993 record = self.record
3994 if record:
3995 return record
3996 else:
3997 return
39984026
3999 def as_json(self):4027 def as_json(self):
4000 """4028 """
@@ -4006,674 +4034,502 @@
4006 else:4034 else:
4007 return4035 return
40084036
4009 def as_javascript(self):4037
4010 """4038
4011 Output the Layer as Javascript4039# -----------------------------------------------------------------------------
4012 - suitable for inclusion in the HTML page4040class SingleRecordLayer(Layer):
4013 """4041 """
4014 gis = self.gis4042 Abstract Base Class for Layers with just a single record
4015 auth = gis.auth4043 """
4016 db = current.db4044
4017 table = self.table4045 def __init__(self, gis):
40184046 super(SingleRecordLayer, self).__init__(gis)
4019 layer_type_list = []4047 table = self.table
4020 # Read the enabled Layers4048 records = current.db(table.id > 0).select()
4021 records = db(table.enabled == True).select()4049 assert len(records) <= 1, (
4022 for record in records:4050 "There should only ever be 0 or 1 %s" % self.__class__.__name__
4023 # Check user is allowed to access the layer4051 )
4024 role_required = record.role_required4052 self.record = None
4025 if (not role_required) or auth.s3_has_role(role_required):4053 record = records.first()
4026 # Pass the record to the Class4054 if record is not None:
4027 self.add_record(record)4055 if record.enabled:
4028 # Read the output dict for this layer4056 role_required = record.role_required
4029 layer_dict = self.as_dict()4057 if not role_required or self.gis.auth.s3_has_role(role_required):
4030 if layer_dict:4058 self.record = record
4031 # Add this layer to the list of layers for this layer type4059 # Refresh the attributes of the Layer
4032 layer_type_list.append(layer_dict)4060 if "apikey" in table:
40334061 if record:
4034 if layer_type_list:4062 self.apikey = record.apikey
4035 # Output the Layer Type as JSON4063 else:
4036 layer_type_json = json.dumps(layer_type_list,4064 self.apikey = None
4037 sort_keys=True,4065 self.gis = gis
4038 indent=4)4066 self.scripts = []
4039 layer_type_js = "".join(("%s = " % self.js_array,4067
4040 layer_type_json,4068 def as_javascript(self):
4041 "\n"))4069 """
4042 return layer_type_js4070 Output the Layer as Javascript
40434071 - suitable for inclusion in the HTML page
4044 def _name_safe(self, name):4072 """
4045 """4073 if self.record:
4046 Make the name safe for use in JSON4074 if "apikey" in self.table and not self.apikey:
4047 i.e. any Unicode character allowed except for " & \4075 raise Exception("Cannot display a %s if we have no valid API Key" % self.__class__.__name__)
4048 """4076 json = self.as_json()
4049 return re.sub('[\\"]', "", name)4077 if json:
40504078 return "%s = %s\n" % (
4051 def _refresh(self):4079 self.js_array,
4052 " Refresh the attributes of the Layer "4080 json
4053 table = self.table4081 )
4054 if "marker_id" in table:4082 else:
4055 self.set_marker()4083 return None
4056 if "projection_id" in table:4084 else:
4057 self.set_projection()4085 return None
40584086
4059 def set_marker(self):4087# -----------------------------------------------------------------------------
4060 " Set the Marker for the Layer "4088class BingLayer(SingleRecordLayer):
4061 gis = self.gis4089 """ Bing Layer from Catalogue """
4062 record = self.record4090 table_name = "gis_layer_bing"
4063 if record:4091 js_array = "S3.gis.Bing"
4064 marker = Marker(gis, record.marker_id)4092
4065 self.marker = marker
4066 else:
4067 self.marker = None
4068
4069 def set_projection(self):
4070 " Set the Projection for the Layer "
4071 gis = self.gis
4072 record = self.record
4073 if record:
4074 projection = Projection(gis, record.projection_id)
4075 self.projection = projection
4076 else:
4077 self.projection = None
4078
4079# -----------------------------------------------------------------------------
4080class OneLayer(Layer):
4081 """
4082 Base Class for Layers with just a single record
4083 Not meant to be instantiated direct
4084 """
4085
4086 def __init__(self, gis, record=None):
4087 db = current.db
4088 tablename = ""
4089 try:
4090 table = db[tablename]
4091 except:
4092 current.manager.load(tablename)
4093 table = db[tablename]
4094 if not record:
4095 # There is only ever 1 layer
4096 record = db(table.id > 0).select().first()
4097
4098 self.gis = gis
4099 self.table = table
4100 self.js_array = "S3.gis.OneLayer"
4101 self.record = record
4102 self._refresh()
4103 self.scripts = []
4104
4105 def as_javascript(self):
4106 """
4107 Output the Layer as Javascript
4108 - suitable for inclusion in the HTML page
4109 """
4110 auth = self.gis.auth
4111 record = self.record
4112 # Check Layer exists in the DB
4113 if not record:
4114 return None
4115 # Check Layer is enabled
4116 if not record.enabled:
4117 return None
4118 # Check user is allowed to access the Layer
4119 role_required = record.role_required
4120 if role_required and not auth.s3_has_role(role_required):
4121 return None
4122 # Read the output JSON for this layer
4123 layer_type_json = self.as_json()
4124 layer_type_js = "".join(("%s = " % self.js_array,
4125 layer_type_json,
4126 "\n"))
4127 return layer_type_js
4128
4129 def _set_api_key(self):
4130 " Set the API Key for the Layer "
4131 record = self.record
4132 if record:
4133 self.apikey = record.apikey
4134 else:
4135 self.apikey = None
4136
4137 def _refresh(self):
4138 " Refresh the attributes of the Layer "
4139 table = self.table
4140 if "apikey" in table:
4141 self._set_api_key()
4142
4143# -----------------------------------------------------------------------------
4144class Bing(OneLayer):
4145 """ Bing Layers from Catalogue """
4146 def __init__(self, gis, record=None):
4147 db = current.db
4148 tablename = "gis_layer_bing"
4149 try:
4150 table = db[tablename]
4151 except:
4152 current.manager.load(tablename)
4153 table = db[tablename]
4154 if not record:
4155 # There is only ever 1 layer
4156 record = db(table.id > 0).select().first()
4157
4158 self.gis = gis
4159 self.table = table
4160 self.js_array = "S3.gis.Bing"
4161 self.record = record
4162 self._refresh()
4163 self.scripts = []
4164
4165 def as_dict(self):4093 def as_dict(self):
4166 gis = self.gis4094 gis = self.gis
4167 record = self.record4095 record = self.record
4168 apikey = self.apikey4096 if record is not None:
41694097 config = self.gis.get_config()
4170 if not apikey:4098 if Projection(gis, id=config.projection_id).epsg != 900913:
4171 # Cannot display Bing layers if we have no valid API Key4099 raise Exception("Cannot display Bing layers unless we're using the Spherical Mercator Projection")
4172 return None4100 else:
41734101 # Mandatory attributes
4174 config = gis.get_config()4102 output = {
4175 if Projection(gis, id=config.projection_id).epsg != 900913:4103 "ApiKey": self.apikey
4176 # Cannot display Bing layers unless we're using the4104 }
4177 # Spherical Mercator Projection4105
4178 return None4106 # Attributes which are defaulted client-side if not set
41794107 if record.aerial_enabled:
4180 # Mandatory attributes4108 output["Aerial"] = record.aerial or "Bing Satellite"
4181 output = {4109 if record.road_enabled:
4182 "ApiKey": self.apikey4110 output["Road"] = record.road or "Bing Roads"
4183 }4111 if record.hybrid_enabled:
41844112 output["Hybrid"] = record.hybrid or "Bing Hybrid"
4185 # Attributes which are defaulted client-side if not set4113 return output
4186 if record.aerial_enabled:4114 else:
4187 output["Aerial"] = record.aerial or "Bing Satellite"4115 return None
4188 if record.road_enabled:
4189 output["Road"] = record.road or "Bing Roads"
4190 if record.hybrid_enabled:
4191 output["Hybrid"] = record.hybrid or "Bing Hybrid"
4192
4193 return output
41944116
4195# -----------------------------------------------------------------------------4117# -----------------------------------------------------------------------------
4196class Google(OneLayer):4118class GoogleLayer(SingleRecordLayer):
4197 """4119 """
4198 Google Layers/Tools from Catalogue4120 Google Layers/Tools from Catalogue
4199 """4121 """
4200 def __init__(self, gis, record=None):4122 table_name = "gis_layer_google"
4201 db = current.db4123 js_array = "S3.gis.Google"
4202 tablename = "gis_layer_google"4124
4203 try:4125 def __init__(self, gis):
4204 table = db[tablename]4126 super(GoogleLayer, self).__init__(gis)
4205 except:4127 record = self.record
4206 current.manager.load(tablename)4128 if record is not None:
4207 table = db[tablename]4129 debug = current.session.s3.debug
4208 debug = current.session.s3.debug4130 add_script = self.scripts.append
4209 if not record:
4210 # There is only ever 1 layer
4211 record = db(table.id > 0).select().first()
4212
4213 self.gis = gis
4214 self.table = table
4215 self.js_array = "S3.gis.Google"
4216 self.record = record
4217 self._refresh()
4218
4219 if record:
4220 if record.mapmaker_enabled or record.mapmakerhybrid_enabled:4131 if record.mapmaker_enabled or record.mapmakerhybrid_enabled:
4221 # Need to use v2 API4132 # Need to use v2 API
4222 # http://code.google.com/p/gmaps-api-issues/issues/detail?id=23494133 # http://code.google.com/p/gmaps-api-issues/issues/detail?id=2349
4223 self.scripts = ["http://maps.google.com/maps?file=api&v=2&key=%s" % self.apikey]4134 add_script("http://maps.google.com/maps?file=api&v=2&key=%s" % self.apikey)
4224 else:4135 else:
4225 # v3 API4136 # v3 API
4226 self.scripts = ["http://maps.google.com/maps/api/js?v=3.2&sensor=false"]4137 add_script("http://maps.google.com/maps/api/js?v=3.2&sensor=false")
4227 if debug and record.streetview_enabled:4138 if debug and record.streetview_enabled:
4228 self.scripts.append("scripts/gis/gxp/widgets/GoogleStreetViewPanel.js")4139 add_script("scripts/gis/gxp/widgets/GoogleStreetViewPanel.js")
4229 if record.earth_enabled:4140 if record.earth_enabled:
4230 self.scripts.append("http://www.google.com/jsapi?key=%s" % self.apikey)4141 add_script("http://www.google.com/jsapi?key=%s" % self.apikey)
4231 self.scripts.append(SCRIPT("google && google.load('earth', '1');", _type="text/javascript"))4142 add_script(SCRIPT("google && google.load('earth', '1');", _type="text/javascript"))
4232 if debug:4143 if debug:
4233 self.scripts.append("scripts/gis/gxp/widgets/GoogleEarthPanel.js")4144 add_script("scripts/gis/gxp/widgets/GoogleEarthPanel.js")
4234 else:
4235 self.scripts = []
42364145
4237 def as_dict(self):4146 def as_dict(self):
4238 gis = self.gis4147 gis = self.gis
4239 T = current.T4148 T = current.T
4240 record = self.record4149 record = self.record
4241 apikey = self.apikey4150 if record is not None:
42424151 config = gis.get_config()
4243 if not apikey and (record.mapmaker_enabled or record.mapmakerhybrid_enabled):4152 if Projection(gis, id=config.projection_id).epsg != 900913:
4244 # Cannot display Google layers if we have no valid API Key4153 if record.earth_enabled:
4154 # But the Google Earth panel can still be enabled
4155 return {
4156 "Earth": str(T("Switch to 3D"))
4157 }
4158 else:
4159 raise Exception("Cannot display Google layers unless we're using the Spherical Mercator Projection")
4160
4161
4162 # Mandatory attributes
4163 #"ApiKey": self.apikey
4164 output = {
4165 }
4166
4167 # Attributes which are defaulted client-side if not set
4168 if record.satellite_enabled:
4169 output["Satellite"] = record.satellite or "Google Satellite"
4170 if record.maps_enabled:
4171 output["Maps"] = record.maps or "Google Maps"
4172 if record.hybrid_enabled:
4173 output["Hybrid"] = record.hybrid or "Google Hybrid"
4174 if record.mapmaker_enabled:
4175 output["MapMaker"] = record.mapmaker or "Google MapMaker"
4176 if record.mapmakerhybrid_enabled:
4177 output["MapMakerHybrid"] = record.mapmakerhybrid or "Google MapMaker Hybrid"
4178 if record.earth_enabled:
4179 output["Earth"] = str(T("Switch to 3D"))
4180 if record.streetview_enabled and not (record.mapmaker_enabled or record.mapmakerhybrid_enabled):
4181 # Streetview doesn't work with v2 API
4182 output["StreetviewButton"] = str(T("Click where you want to open Streetview"))
4183 output["StreetviewTitle"] = str(T("Street View"))
4184
4185 return output
4186 else:
4245 return None4187 return None
42464188
4247 config = gis.get_config()
4248 if Projection(gis, id=config.projection_id).epsg != 900913:
4249 # Cannot display Google layers unless we're using the
4250 # Spherical Mercator Projection
4251 if record.earth_enabled:
4252 # But the Google Earth panel can still be enabled
4253 output = {
4254 "Earth": str(T("Switch to 3D"))
4255 }
4256 return output
4257 else:
4258 return None
4259
4260 # Mandatory attributes
4261 #"ApiKey": self.apikey
4262 output = {
4263 }
4264
4265 # Attributes which are defaulted client-side if not set
4266 if record.satellite_enabled:
4267 output["Satellite"] = record.satellite or "Google Satellite"
4268 if record.maps_enabled:
4269 output["Maps"] = record.maps or "Google Maps"
4270 if record.hybrid_enabled:
4271 output["Hybrid"] = record.hybrid or "Google Hybrid"
4272 if record.mapmaker_enabled:
4273 output["MapMaker"] = record.mapmaker or "Google MapMaker"
4274 if record.mapmakerhybrid_enabled:
4275 output["MapMakerHybrid"] = record.mapmakerhybrid or "Google MapMaker Hybrid"
4276 if record.earth_enabled:
4277 output["Earth"] = str(T("Switch to 3D"))
4278 if record.streetview_enabled and not (record.mapmaker_enabled or record.mapmakerhybrid_enabled):
4279 # Streetview doesn't work with v2 API
4280 output["StreetviewButton"] = str(T("Click where you want to open Streetview"))
4281 output["StreetviewTitle"] = str(T("Street View"))
4282
4283 return output
4284
4285# -----------------------------------------------------------------------------4189# -----------------------------------------------------------------------------
4286class Yahoo(OneLayer):4190class YahooLayer(SingleRecordLayer):
4287 """4191 """
4288 Yahoo Layers from Catalogue4192 Yahoo Layers from Catalogue
42894193
4290 NB This will stop working on 13 September 20114194 NB This will stop working on 13 September 2011
4291 http://developer.yahoo.com/blogs/ydn/posts/2011/06/yahoo-maps-apis-service-closure-announcement-new-maps-offerings-coming-soon/4195 http://developer.yahoo.com/blogs/ydn/posts/2011/06/yahoo-maps-apis-service-closure-announcement-new-maps-offerings-coming-soon/
4292 """4196 """
4293 def __init__(self, gis, record=None):4197 js_array = "S3.gis.Yahoo"
4294 db = current.db4198 table_name = "gis_layer_yahoo"
4295 tablename = "gis_layer_yahoo"4199
4296 try:4200 def __init__(self, gis):
4297 table = db[tablename]4201 super(YahooLayer, self).__init__(gis)
4298 except:4202 if self.record:
4299 current.manager.load(tablename)4203 self.scripts.append("http://api.maps.yahoo.com/ajaxymap?v=3.8&appid=%s" % self.apikey)
4300 table = db[tablename]4204 config = gis.get_config()
4301 if not record:4205 if Projection(gis, id=config.projection_id).epsg != 900913:
4302 # There is only ever 1 layer4206 raise Exception("Cannot display Yahoo layers unless we're using the Spherical Mercator Projection")
4303 record = db(table.id > 0).select().first()
4304
4305 self.gis = gis
4306 self.table = table
4307 self.js_array = "S3.gis.Yahoo"
4308 self.record = record
4309 self._refresh()
4310 if record:
4311 self.scripts = ["http://api.maps.yahoo.com/ajaxymap?v=3.8&appid=%s" % self.apikey]
4312 else:
4313 self.scripts = []
43144207
4315 def as_dict(self):4208 def as_dict(self):
4316 gis = self.gis
4317 record = self.record4209 record = self.record
4318 apikey = self.apikey4210 if record is not None:
43194211 # Mandatory attributes
4320 if not apikey:4212 #"ApiKey": self.apikey
4321 # Cannot display Yahoo layers if we have no valid API Key4213 output = {
4322 return None4214 }
43234215
4324 config = gis.get_config()4216 # Attributes which are defaulted client-side if not set
4325 if Projection(gis, id=config.projection_id).epsg != 900913:4217 if record.satellite_enabled:
4326 # Cannot display Yahoo layers unless we're using the4218 output["Satellite"] = record.satellite or "Yahoo Satellite"
4327 # Spherical Mercator Projection4219 if record.maps_enabled:
4328 return None4220 output["Maps"] = record.maps or "Yahoo Maps"
43294221 if record.hybrid_enabled:
4330 # Mandatory attributes4222 output["Hybrid"] = record.hybrid or "Yahoo Hybrid"
4331 #"ApiKey": self.apikey4223
4332 output = {4224 return output
4333 }4225 else:
43344226 return None
4335 # Attributes which are defaulted client-side if not set4227
4336 if record.satellite_enabled:4228class MultiRecordLayer(Layer):
4337 output["Satellite"] = record.satellite or "Yahoo Satellite"4229 def __init__(self, gis):
4338 if record.maps_enabled:4230 super(MultiRecordLayer, self).__init__(gis)
4339 output["Maps"] = record.maps or "Yahoo Maps"4231 self.sublayers = []
4340 if record.hybrid_enabled:4232 self.scripts = []
4341 output["Hybrid"] = record.hybrid or "Yahoo Hybrid"4233
43424234 auth = gis.auth
4343 return output4235
4236 layer_type_list = []
4237 # Read the enabled Layers
4238 for record in current.db(self.table.enabled == True).select():
4239 # Check user is allowed to access the layer
4240 role_required = record.role_required
4241 if (not role_required) or auth.s3_has_role(role_required):
4242 self.sublayers.append(self.SubLayer(gis, record))
4243
4244 def as_javascript(self):
4245 """
4246 Output the Layer as Javascript
4247 - suitable for inclusion in the HTML page
4248 """
4249 sublayer_dicts = []
4250 for sublayer in self.sublayers:
4251 # Read the output dict for this sublayer
4252 sublayer_dict = sublayer.as_dict()
4253 if sublayer_dict:
4254 # Add this layer to the list of layers for this layer type
4255 sublayer_dicts.append(sublayer_dict)
4256
4257 if sublayer_dicts:
4258 # Output the Layer Type as JSON
4259 layer_type_json = json.dumps(sublayer_dicts,
4260 sort_keys=True,
4261 indent=4)
4262 return "%s = %s\n" % (self.js_array, layer_type_json)
4263 else:
4264 return None
4265
4266 class SubLayer(object):
4267 def __init__(self, gis, record):
4268 # Ensure all attributes available (even if Null)
4269 self.gis = gis
4270 self.__dict__.update(record)
4271 del record
4272 self.safe_name = re.sub('[\\"]', "", self.name)
4273
4274 if hasattr(self, "marker_id"):
4275 self.marker = Marker(gis, self.marker_id)
4276 if hasattr(self, "projection_id"):
4277 self.projection = Projection(gis, self.projection_id)
4278
4279 def setup_clustering(self, output):
4280 gis = self.gis
4281 cluster_distance = gis.cluster_distance
4282 cluster_threshold = gis.cluster_threshold
4283 if self.cluster_distance != cluster_distance:
4284 output["cluster_distance"] = self.cluster_distance
4285 if self.cluster_threshold != cluster_threshold:
4286 output["cluster_threshold"] = self.cluster_threshold
4287
4288 def setup_visibility_and_opacity(self, output):
4289 if not self.visible:
4290 output["visibility"] = False
4291 if self.opacity != 1:
4292 output["opacity"] = "%.1f" % self.opacity
4293
4294 def add_attributes_if_not_default(self, output, **values_and_defaults):
4295 # could also write values in debug mode, to check if defaults ignored.
4296 # could also check values are not being overwritten.
4297 for key, (value, defaults) in values_and_defaults.iteritems():
4298 if value not in defaults:
4299 output[key] = value
4300
4301 #def set_marker(self):
4302 # " Set the Marker for the Layer "
4303 # gis = self.gis
4304 # self.marker = Marker(gis, self.marker_id)
4305
4306 #def set_projection(self):
4307 # " Set the Projection for the Layer "
4308 # gis = self.gis
4309 # self.projection = Projection(gis, self.projection_id)
43444310
4345# -----------------------------------------------------------------------------4311# -----------------------------------------------------------------------------
4346class FeatureLayer(Layer):4312class FeatureLayer(MultiRecordLayer):
4347 """ Feature Layer from Catalogue """4313 """ Feature Layer from Catalogue """
4348 def __init__(self, gis, record=None):4314 table_name = "gis_layer_feature"
4349 db = current.db4315 js_array = "S3.gis.layers_features"
4350 tablename = "gis_layer_feature"4316
4351 try:4317 class SubLayer(MultiRecordLayer.SubLayer):
4352 table = db[tablename]4318 def __init__(self, gis, record):
4353 except:4319 record_module = record.module
4354 current.manager.load(tablename)4320 if record_module is not None:
4355 table = db[tablename]4321 if record_module not in gis.deployment_settings.modules:
43564322 raise Exception("%s module is disabled" % record_module)
4357 self.gis = gis4323 if not gis.auth.permission(c=record.module, f=record.resource):
4358 self.table = table4324 raise Exception("User has no permission to this resource (in ACL)")
4359 self.js_array = "S3.gis.layers_features"4325 else:
4360 self.record = record4326 raise Exception("FeatureLayer Record '%s' has no module" % record.name)
4361 self._refresh()4327 super(FeatureLayer.SubLayer, self).__init__(gis, record)
4362 self.scripts = []4328
43634329 def as_dict(self):
4364 def as_dict(self):4330 gis = self.gis
4365 gis = self.gis4331
4366 cluster_distance = gis.cluster_distance4332 request = current.request
4367 cluster_threshold = gis.cluster_threshold4333 deployment_settings = gis.deployment_settings
4368 record = self.record4334
4369 marker = self.marker4335 url = "%s.geojson?layer=%i" % (URL(self.module, self.resource),
43704336 self.id)
4371 auth = gis.auth4337 if self.filter:
4372 request = current.request4338 url = "%s&%s" % (url, self.filter)
4373 deployment_settings = gis.deployment_settings4339
43744340 # Mandatory attributes
4375 if record.module not in deployment_settings.modules:4341 output = {
4376 # Module is disabled4342 "name": self.safe_name,
4377 return
4378 if not auth.permission(c=record.module, f=record.resource):
4379 # User has no permission to this resource (in ACL)
4380 return
4381
4382 name_safe = self._name_safe(record.name)
4383
4384 url = "%s.geojson?layer=%i" % (URL(record.module,record.resource),
4385 record.id)
4386 if record.filter:
4387 url = "%s&%s" % (url, record.filter)
4388
4389 # Mandatory attributes
4390 output = {
4391 "name": name_safe,
4392 "url": url,4343 "url": url,
4393 "marker_image": marker.image,
4394 "marker_height": marker.height,
4395 "marker_width": marker.width,
4396 }4344 }
43974345 self.marker.add_attributes_to_output(output)
4398 # Attributes which are defaulted client-side if not set4346 self.setup_visibility_and_opacity(output)
4399 if not record.visible:4347 self.setup_clustering(output)
4400 output["visibility"] = False4348
4401 if record.opacity != 1:4349 return output
4402 output["opacity"] = "%.1f" % record.opacity
4403 if record.cluster_distance != cluster_distance:
4404 output["cluster_distance"] = record.cluster_distance
4405 if record.cluster_threshold != cluster_threshold:
4406 output["cluster_threshold"] = record.cluster_threshold
4407
4408 return output
44094350
4410# -----------------------------------------------------------------------------4351# -----------------------------------------------------------------------------
4411class GeoJSONLayer(Layer):4352class GeoJSONLayer(MultiRecordLayer):
4412 """ GeoJSON Layer from Catalogue """4353 """ GeoJSON Layer from Catalogue """
4413 def __init__(self, gis, record=None):4354 table_name = "gis_layer_geojson"
4414 db = current.db4355 js_array = "S3.gis.layers_geojson"
4415 tablename = "gis_layer_geojson"4356
4416 try:4357 class SubLayer(MultiRecordLayer.SubLayer):
4417 table = db[tablename]4358 def as_dict(self):
4418 except:4359 # Mandatory attributes
4419 current.manager.load(tablename)4360 output = {
4420 table = db[tablename]4361 "name": self.safe_name,
44214362 "url": self.url,
4422 self.gis = gis
4423 self.table = table
4424 self.js_array = "S3.gis.layers_geojson"
4425 self.record = record
4426 self._refresh()
4427 self.scripts = []
4428
4429 def as_dict(self):
4430 gis = self.gis
4431 cluster_distance = gis.cluster_distance
4432 cluster_threshold = gis.cluster_threshold
4433 record = self.record
4434 marker = self.marker
4435 projection = self.projection
4436
4437 name_safe = self._name_safe(record.name)
4438 # Mandatory attributes
4439 output = {
4440 "name": name_safe,
4441 "url": record.url,
4442 "marker_image": marker.image,
4443 "marker_height": marker.height,
4444 "marker_width": marker.width,
4445 }4363 }
44464364 self.marker.add_attributes_to_output(output)
4447 # Attributes which are defaulted client-side if not set4365
4448 if projection.epsg != 4326:4366 # Attributes which are defaulted client-side if not set
4449 output["projection"] = projection.epsg4367 projection = self.projection
4450 if not record.visible:4368 if projection.epsg != 4326:
4451 output["visibility"] = False4369 output["projection"] = projection.epsg
4452 if record.opacity != 1:4370 self.setup_visibility_and_opacity(output)
4453 output["opacity"] = "%.1f" % record.opacity4371 self.setup_clustering(output)
4454 if record.cluster_distance != cluster_distance:4372
4455 output["cluster_distance"] = record.cluster_distance4373 return output
4456 if record.cluster_threshold != cluster_threshold:
4457 output["cluster_threshold"] = record.cluster_threshold
4458
4459 return output
44604374
4461# -----------------------------------------------------------------------------4375# -----------------------------------------------------------------------------
4462class GeoRSSLayer(Layer):4376class GeoRSSLayer(MultiRecordLayer):
4463 """ GeoRSS Layer from Catalogue """4377 """ GeoRSS Layer from Catalogue """
4464 def __init__(self, gis, record=None):4378 table_name = "gis_layer_georss"
4465 db = current.db4379 js_array = "S3.gis.layers_georss"
4466 tablename = "gis_layer_georss"4380
4467 try:4381 def __init__(self, gis):
4468 table = db[tablename]4382 super(GeoRSSLayer, self).__init__(gis)
4469 except:4383 GeoRSSLayer.SubLayer.cachetable = current.db.gis_cache
4470 current.manager.load(tablename)4384
4471 table = db[tablename]4385 class SubLayer(MultiRecordLayer.SubLayer):
44724386 def as_dict(self):
4473 self.gis = gis4387 gis = self.gis
4474 self.table = table4388
4475 self.cachetable = db.gis_cache4389 db = current.db
4476 self.js_array = "S3.gis.layers_georss"4390 request = current.request
4477 self.record = record4391 public_url = gis.public_url
4478 self._refresh()4392 cachetable = self.cachetable
4479 self.scripts = []4393
44804394 url = self.url
4481 def as_dict(self):4395 # Check to see if we should Download layer to the cache
4482 gis = self.gis4396 download = True
4483 cluster_distance = gis.cluster_distance4397 query = (cachetable.source == url)
4484 cluster_threshold = gis.cluster_threshold4398 existing_cached_copy = db(query).select(cachetable.modified_on,
4485 record = self.record4399 limitby=(0, 1)).first()
4486 marker = self.marker4400 refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB)
44874401 if existing_cached_copy:
4488 db = current.db4402 modified_on = existing_cached_copy.modified_on
4489 request = current.request4403 cutoff = modified_on + timedelta(seconds=refresh)
4490 public_url = gis.public_url4404 if request.utcnow < cutoff:
4491 cachetable = self.cachetable4405 download = False
44924406 if download:
4493 url = record.url4407 # Download layer to the Cache
4494 # Check to see if we should Download layer to the cache4408 # @ToDo: Call directly without going via HTTP
4495 download = True4409 # s3mgr = current.manager
4496 query = (cachetable.source == url)4410 # @ToDo: Make this async by using Celery (also use this for the refresh time)
4497 cached = db(query).select(cachetable.modified_on,4411 fields = ""
4498 limitby=(0, 1)).first()4412 if self.data:
4499 refresh = record.refresh or 900 # 15 minutes set if we have no data (legacy DB)4413 fields = "&data_field=%s" % self.data
4500 if cached:4414 if self.image:
4501 modified_on = cached.modified_on4415 fields = "%s&image_field=%s" % (fields, self.image)
4502 cutoff = modified_on + timedelta(seconds=refresh)4416 _url = "%s%s/update.georss?fetchurl=%s%s" % (public_url,
4503 if request.utcnow < cutoff:4417 URL(c="gis", f="cache_feed"),
4504 download = False4418 url,
4505 if download:4419 fields)
4506 # Download layer to the Cache4420 try:
4507 # @ToDo: Call directly without going via HTTP4421 # @ToDo: Need to commit to not have DB locked with SQLite?
4508 # s3mgr = current.manager4422 fetch(_url)
4509 # @ToDo: Make this async by using Celery (also use this for the refresh time)4423 if existing_cached_copy:
4510 fields = ""4424 # Clear old selfs which are no longer active
4511 if record.data:4425 query = (cachetable.source == url) & \
4512 fields = "&data_field=%s" % record.data4426 (cachetable.modified_on < cutoff)
4513 if record.image:4427 db(query).delete()
4514 fields = "%s&image_field=%s" % (fields, record.image)4428 except:
4515 _url = "%s%s/update.georss?fetchurl=%s%s" % (public_url,4429 # Feed down
4516 URL(c="gis", f="cache_feed"),4430 if existing_cached_copy:
4517 url,4431 # Use cached copy
4518 fields)4432 # Should we Update timestamp to prevent every
4519 try:4433 # subsequent request attempting the download?
4520 # @ToDo: Need to commit to not have DB locked with SQLite?4434 #query = (cachetable.source == url)
4521 fetch(_url)4435 #db(query).update(modified_on=request.utcnow)
4522 if cached:4436 pass
4523 # Clear old records which are no longer active4437 else:
4524 query = (cachetable.source == url) & \4438 raise Exception("No cached copy available - skip layer")
4525 (cachetable.modified_on < cutoff)4439
4526 db(query).delete()4440 name_safe = self.safe_name
4527 except:4441
4528 # Feed down4442 # Pass the GeoJSON URL to the client
4529 if cached:4443 # Filter to the source of this feed
4530 # Use cached copy4444 url = "%s.geojson?cache.source=%s" % (URL(c="gis", f="cache_feed"),
4531 # Should we Update timestamp to prevent every4445 url)
4532 # subsequent request attempting the download?4446
4533 #query = (cachetable.source == url)4447 # Mandatory attributes
4534 #db(query).update(modified_on=request.utcnow)4448 output = {
4535 pass4449 "name": name_safe,
4536 else:4450 "url": url,
4537 # No cached copy available - skip layer4451 }
4538 return4452 self.marker.add_attributes_to_output(output)
45394453
4540 name_safe = self._name_safe(record.name)4454 # Attributes which are defaulted client-side if not set
45414455 if self.refresh != 900:
4542 # Pass the GeoJSON URL to the client4456 output["refresh"] = self.refresh
4543 # Filter to the source of this feed4457 self.setup_visibility_and_opacity(output)
4544 url = "%s.geojson?cache.source=%s" % (URL(c="gis", f="cache_feed"),4458 self.setup_clustering(output)
4545 url)4459
45464460 return output
4547 # Mandatory attributes
4548 output = {
4549 "name": name_safe,
4550 "url": url,
4551 "marker_image": marker.image,
4552 "marker_height": marker.height,
4553 "marker_width": marker.width,
4554 }
4555
4556 # Attributes which are defaulted client-side if not set
4557 if record.refresh != 900:
4558 output["refresh"] = record.refresh
4559 if not record.visible:
4560 output["visibility"] = False
4561 if record.opacity != 1:
4562 output["opacity"] = "%.1f" % record.opacity
4563 if record.cluster_distance != cluster_distance:
4564 output["cluster_distance"] = record.cluster_distance
4565 if record.cluster_threshold != cluster_threshold:
4566 output["cluster_threshold"] = record.cluster_threshold
4567
4568 return output
45694461
4570# -----------------------------------------------------------------------------4462# -----------------------------------------------------------------------------
4571class GPXLayer(Layer):4463class GPXLayer(MultiRecordLayer):
4572 """ GPX Layer from Catalogue """4464 """ GPX Layer from Catalogue """
4573 def __init__(self, gis, record=None):4465 table_name = "gis_layer_gpx"
4574 db = current.db4466 js_array = "S3.gis.layers_gpx"
4575 tablename = "gis_layer_gpx"4467
4576 try:4468 def __init__(self, gis):
4577 table = db[tablename]4469 super(GPXLayer, self).__init__(gis)
4578 except:4470
4579 current.manager.load(tablename)4471# if record:
4580 table = db[tablename]4472# self.url = "%s/%s" % (URL(c="default", f="download"),
45814473# record.track)
4582 self.gis = gis4474# else:
4583 self.table = table4475# self.url = None
4584 self.js_array = "S3.gis.layers_gpx"4476
4585 self.record = record4477 class SubLayer(MultiRecordLayer.SubLayer):
4586 self._refresh()4478 def as_dict(self):
4587 self.scripts = []4479 gis = self.gis
45884480 request = current.request
4589 def as_dict(self):4481
4590 gis = self.gis4482 url = URL(c="default", f="download",
4591 cluster_distance = gis.cluster_distance4483 args=self.track)
4592 cluster_threshold = gis.cluster_threshold4484
4593 record = self.record4485 # Mandatory attributes
4594 marker = self.marker4486 output = {
45954487 "name": self.safe_name,
4596 name_safe = self._name_safe(record.name)4488 "url": url,
45974489 }
4598 request = current.request4490 self.marker.add_attributes_to_output(output)
4599 url = URL(c="default", f="download",4491 self.add_attributes_if_not_default(
4600 args=record.track)4492 output,
46014493 waypoints = (self.waypoints, (True,)),
4602 # Mandatory attributes4494 tracks = (self.tracks, (True,)),
4603 output = {4495 routes = (self.routes, (True,)),
4604 "name": name_safe,4496 )
4605 "url": url,4497 self.setup_visibility_and_opacity(output)
4606 "marker_image": marker.image,4498 self.setup_clustering(output)
4607 "marker_height": marker.height,4499 return output
4608 "marker_width": marker.width,
4609 }
4610
4611 # Attributes which are defaulted client-side if not set
4612 if not record.waypoints:
4613 output["waypoints"] = False
4614 if not record.tracks:
4615 output["tracks"] = False
4616 if not record.routes:
4617 output["routes"] = False
4618 if not record.visible:
4619 output["visibility"] = False
4620 if record.opacity != 1:
4621 output["opacity"] = "%.1f" % record.opacity
4622 if record.cluster_distance != cluster_distance:
4623 output["cluster_distance"] = record.cluster_distance
4624 if record.cluster_threshold != cluster_threshold:
4625 output["cluster_threshold"] = record.cluster_threshold
4626
4627 return output
4628
4629 def refresh(self):
4630 " Refresh the attributes of the Layer "
4631 gis = self.gis
4632 request = current.request
4633 record = self.record
4634 table = self.table
4635 if "marker_id" in table:
4636 self.set_marker()
4637 if "projection_id" in table:
4638 self.set_projection()
4639 if record:
4640 self.url = "%s/%s" % (URL(c="default", f="download"),
4641 record.track)
4642 else:
4643 self.url = None
46444500
4645# -----------------------------------------------------------------------------4501# -----------------------------------------------------------------------------
4646class KMLLayer(Layer):4502class KMLLayer(MultiRecordLayer):
4647 """ KML Layer from Catalogue """4503 """ KML Layer from Catalogue """
4648 def __init__(self, gis, record=None):4504 table_name = "gis_layer_kml"
4649 db = current.db4505 js_array = "S3.gis.layers_kml"
4650 tablename = "gis_layer_kml"4506
4651 try:4507 def __init__(self, gis):
4652 table = db[tablename]4508 super(KMLLayer, self).__init__(gis)
4653 except:4509
4654 current.manager.load(tablename)4510 "Set up the KML cache, should be done once per request"
4655 table = db[tablename]
4656
4657 self.gis = gis
4658 self.table = table
4659 # @ToDo: Migrate to gis_cache
4660 self.cachetable = db.gis_cache2
4661 self.js_array = "S3.gis.layers_kml"
4662 self.record = record
4663 self._refresh()
4664 self.scripts = []
4665
4666 # Can we cache downloaded KML feeds?4511 # Can we cache downloaded KML feeds?
4667 # Needed for unzipping & filtering as well4512 # Needed for unzipping & filtering as well
4668 # @ToDo: Should we move this folder to static to speed up access to cached content?4513 # @ToDo: Should we move this folder to static to speed up access to cached content?
4669 # Do we need to secure it?4514 # Do we need to secure it?
4670 cachepath = os.path.join(current.request.folder, "uploads", "gis_cache")4515 cachepath = os.path.join(current.request.folder, "uploads", "gis_cache")
4671 if os.access(cachepath, os.W_OK):4516
4672 cacheable = True4517 if os.path.exists(cachepath):
4518 cacheable = os.access(cachepath, os.W_OK)
4673 else:4519 else:
4674 try:4520 try:
4675 os.mkdir(cachepath)4521 os.mkdir(cachepath)
4522 except OSError, os_error:
4523 self.gis.debug(
4524 "GIS: KML layers cannot be cached: %s %s" % (
4525 cachepath,
4526 os_error
4527 )
4528 )
4529 cacheable = False
4530 else:
4676 cacheable = True4531 cacheable = True
4532<<<<<<< TREE
4677 except:4533 except:
4678 cacheable = False4534 cacheable = False
4679 self.cacheable = cacheable4535 self.cacheable = cacheable
@@ -4720,209 +4576,164 @@
4720 # Download file (async, if workers alive)4576 # Download file (async, if workers alive)
4721 current.s3task.async("download_kml",4577 current.s3task.async("download_kml",
4722 args=[record.id, filename])4578 args=[record.id, filename])
4579=======
4580 # @ToDo: Migrate to gis_cache
4581 KMLLayer.cachetable = current.db.gis_cache2
4582 KMLLayer.cacheable = cacheable
4583 KMLLayer.cachepath = cachepath
4584
4585
4586 class SubLayer(MultiRecordLayer.SubLayer):
4587 def as_dict(self):
4588 gis = self.gis
4589
4590 T = current.T
4591 db = current.db
4592 request = current.request
4593 response = current.response
4594 public_url = gis.public_url
4595
4596 cachetable = KMLLayer.cachetable
4597 cacheable = KMLLayer.cacheable
4598 cachepath = KMLLayer.cachepath
4599
4600 name = self.name
4601 if cacheable:
4602 _name = urllib2.quote(name)
4603 _name = _name.replace("%", "_")
4604 filename = "%s.file.%s.kml" % (cachetable._tablename,
4605 _name)
4606
4607 # Should we download a fresh copy of the source file?
4608 download = True
4609 query = (cachetable.name == name)
4610 cached = db(query).select(cachetable.modified_on,
4611 limitby=(0, 1)).first()
4612 refresh = self.refresh or 900 # 15 minutes set if we have no data (legacy DB)
4613>>>>>>> MERGE-SOURCE
4723 if cached:4614 if cached:
4724 db(query).update(modified_on=request.utcnow)4615 modified_on = cached.modified_on
4725 else:4616 cutoff = modified_on + timedelta(seconds=refresh)
4726 cachetable.insert(name=name, file=filename)4617 if request.utcnow < cutoff:
47274618 download = False
4728 url = URL(c="default", f="download",4619
4729 args=[filename])4620 if download:
4730 else:4621 # Download file
4731 # No caching possible (e.g. GAE), display file direct from remote (using Proxy)4622 if response.s3.tasks_active():
4732 # (Requires OpenLayers.Layer.KML to be available)4623 # Async call
4733 url = record.url4624 db.task_scheduled.insert(name="download_kml_%s" % uuid.uuid4(),
47344625 func="download_kml",
4735 # Mandatory attributes4626 args=json.dumps([record.id,
4736 output = {4627 filename]))
4737 "name": name_safe,4628 else:
4738 "url": url,4629 # Sync call
4739 "marker_image": marker.image,4630 gis.download_kml(self.id, filename)
4740 "marker_height": marker.height,4631 if cached:
4741 "marker_width": marker.width,4632 db(query).update(modified_on=request.utcnow)
4742 }4633 else:
47434634 cachetable.insert(name=name, file=filename)
4744 # Attributes which are defaulted client-side if not set4635
4745 if record.title and record.title != "name":4636 url = URL(r=request, c="default", f="download",
4746 output["title"] = record.title4637 args=[filename])
4747 if record.body and record.body != "description":4638 else:
4748 output["body"] = record.body4639 # No caching possible (e.g. GAE), display file direct from remote (using Proxy)
4749 if record.refresh != 900:4640 # (Requires OpenLayers.Layer.KML to be available)
4750 output["refresh"] = record.refresh4641 url = self.url
4751 if not record.visible:4642
4752 output["visibility"] = False4643 output = dict(
4753 if record.opacity != 1:4644 name = self.safe_name,
4754 output["opacity"] = "%.1f" % record.opacity4645 url = url,
4755 if record.cluster_distance != cluster_distance:4646 )
4756 output["cluster_distance"] = record.cluster_distance4647 self.add_attributes_if_not_default(
4757 if record.cluster_threshold != cluster_threshold:4648 output,
4758 output["cluster_threshold"] = record.cluster_threshold4649 title = (self.title, ("name", None, "")),
47594650 body = (self.body, ("description", None)),
4760 return output4651 refresh = (self.refresh, (900,)),
4652 )
4653 self.setup_visibility_and_opacity(output)
4654 self.setup_clustering(output)
4655 self.marker.add_attributes_to_output(output)
4656 return output
47614657
4762# -----------------------------------------------------------------------------4658# -----------------------------------------------------------------------------
4763class TMSLayer(Layer):4659class TMSLayer(MultiRecordLayer):
4764 """ TMS Layer from Catalogue """4660 """ TMS Layer from Catalogue """
4765 def __init__(self, gis, record=None):4661 table_name = "gis_layer_tms"
4766 db = current.db4662 js_array = "S3.gis.layers_tms"
4767 tablename = "gis_layer_tms"4663
4768 try:4664 class SubLayer(MultiRecordLayer.SubLayer):
4769 table = db[tablename]4665 def as_dict(self):
4770 except:4666 output = {
4771 current.manager.load(tablename)4667 "name": self.safe_name,
4772 table = db[tablename]4668 "url": self.url,
47734669 "layername": self.layername
4774 self.gis = gis4670 }
4775 self.table = table4671 self.add_attributes_if_not_default(
4776 self.js_array = "S3.gis.layers_tms"4672 output,
4777 self.record = record4673 url2 = (self.url2, (None,)),
4778 self._refresh()4674 url3 = (self.url3, (None,)),
4779 self.scripts = []4675 format = (self.img_format, ("png", None)),
47804676 zoomLevels = (self.zoom_levels, (9,)),
4781 def as_dict(self):4677 attribution = (self.attribution, (None,)),
4782 #gis = self.gis4678 )
4783 record = self.record4679 return output
4784
4785 name_safe = self._name_safe(record.name)
4786
4787 # Mandatory attributes
4788 output = {
4789 "name": name_safe,
4790 "url": record.url,
4791 "layername": record.layername
4792 }
4793
4794 # Attributes which are defaulted client-side if not set
4795 if record.url2:
4796 output["url2"] = record.url2
4797 if record.url3:
4798 output["url3"] = record.url3
4799 if record.img_format != "png":
4800 output["format"] = record.img_format
4801 if record.zoom_levels != 9:
4802 output["zoomLevels"] = record.zoom_levels
4803 if record.attribution:
4804 output["attribution"] = record.attribution
4805
4806 return output
48074680
4808# -----------------------------------------------------------------------------4681# -----------------------------------------------------------------------------
4809class WFSLayer(Layer):4682class WFSLayer(MultiRecordLayer):
4810 """ WFS Layer from Catalogue """4683 """ WFS Layer from Catalogue """
4811 def __init__(self, gis, record=None):4684 table_name = "gis_layer_wfs"
4812 db = current.db4685 js_array = "S3.gis.layers_wfs"
4813 tablename = "gis_layer_wfs"4686
4814 try:4687 class SubLayer(MultiRecordLayer.SubLayer):
4815 table = db[tablename]4688 def as_dict(self):
4816 except:4689 output = dict(
4817 current.manager.load(tablename)4690 name = self.safe_name,
4818 table = db[tablename]4691 url = self.url,
48194692 title = self.title,
4820 self.gis = gis4693 featureType = self.featureType,
4821 self.table = table4694 featureNS = self.featureNS,
4822 self.js_array = "S3.gis.layers_wfs"4695 schema = self.wfs_schema,
4823 self.record = record4696 )
4824 self._refresh()4697 self.add_attributes_if_not_default(
4825 self.scripts = []4698 output,
48264699 version = (self.version, ("1.1.0",)),
4827 def as_dict(self):4700 geometryName = (self.geometryName, ("the_geom",)),
4828 gis = self.gis4701 styleField = (self.style_field, (None,)),
4829 cluster_distance = gis.cluster_distance4702 styleValues = (self.style_values, ("{}", None)),
4830 cluster_threshold = gis.cluster_threshold4703 projection = (self.projection.epsg, (4326,)),
4831 record = self.record
4832 projection = self.projection
4833
4834 name_safe = self._name_safe(record.name)
4835
4836 # Mandatory attributes
4837 output = {
4838 "name": name_safe,
4839 "url": record.url,
4840 "title": record.title,
4841 "featureType": record.featureType,
4842 "featureNS": record.featureNS,
4843 "schema": record.wfs_schema,
4844 #editable4704 #editable
4845 }4705 )
48464706 self.setup_visibility_and_opacity(output)
4847 # Attributes which are defaulted client-side if not set4707 self.setup_clustering(output)
4848 if record.version != "1.1.0":4708 return output
4849 output["version"] = record.version
4850 if record.geometryName != "the_geom":
4851 output["geometryName"] = record.geometryName
4852 if record.style_field:
4853 output["styleField"] = record.style_field
4854 if record.style_values and record.style_values != "{}":
4855 output["styleValues"] = record.style_values
4856 if projection.epsg != 4326:
4857 output["projection"] = projection.epsg
4858 if not record.visible:
4859 output["visibility"] = False
4860 if record.opacity != 1:
4861 output["opacity"] = "%.1f" % record.opacity
4862 if record.cluster_distance != cluster_distance:
4863 output["cluster_distance"] = record.cluster_distance
4864 if record.cluster_threshold != cluster_threshold:
4865 output["cluster_threshold"] = record.cluster_threshold
4866
4867 return output
48684709
4869# -----------------------------------------------------------------------------4710# -----------------------------------------------------------------------------
4870class WMSLayer(Layer):4711class WMSLayer(MultiRecordLayer):
4871 """ WMS Layer from Catalogue """4712 """ WMS Layer from Catalogue """
4872 def __init__(self, gis, record=None):4713 js_array = "S3.gis.layers_wms"
4873 db = current.db4714 table_name = "gis_layer_wms"
4874 tablename = "gis_layer_wms"4715
4875 try:4716 class SubLayer(MultiRecordLayer.SubLayer):
4876 table = db[tablename]4717 def as_dict(self):
4877 except:4718 output = dict(
4878 current.manager.load(tablename)4719 name = self.safe_name,
4879 table = db[tablename]4720 url = self.url,
48804721 layers = self.layers
4881 self.gis = gis4722 )
4882 self.table = table4723 self.add_attributes_if_not_default(
4883 self.js_array = "S3.gis.layers_wms"4724 output,
4884 self.record = record4725 transparent = (self.transparent, (True,)),
4885 self._refresh()4726 version = (self.version, ("1.1.1",)),
4886 self.scripts = []4727 format = (self.img_format, ("image/png",)),
48874728 map = (self.map, (None,)),
4888 def as_dict(self):4729 buffer = (self.buffer, (0,)),
4889 #gis = self.gis4730 base = (self.base, (False,)),
4890 record = self.record4731 style = (self.style, (None,)),
48914732 bgcolor = (self.bgcolor, (None,)),
4892 name_safe = self._name_safe(record.name)4733 tiled = (self.tiled, (False, )),
48934734 )
4894 # Mandatory attributes4735 self.setup_visibility_and_opacity(output)
4895 output = {4736 return output
4896 "name": name_safe,
4897 "url": record.url,
4898 "layers": record.layers
4899 }
4900
4901 # Attributes which are defaulted client-side if not set
4902 if not record.visible:
4903 output["visibility"] = False
4904 if record.opacity != 1:
4905 output["opacity"] = "%.1f" % record.opacity
4906 if not record.transparent:
4907 output["transparent"] = False
4908 if record.version != "1.1.1":
4909 output["version"] = record.version
4910 if record.img_format != "image/png":
4911 output["format"] = record.img_format
4912 if record.map:
4913 output["map"] = record.map
4914 if record.style:
4915 output["style"] = record.style
4916 if record.bgcolor:
4917 output["bgcolor"] = record.bgcolor
4918 if record.tiled:
4919 output["tiled"] = True
4920 if record.buffer:
4921 output["buffer"] = record.buffer
4922 if record.base:
4923 output["base"] = True
4924
4925 return output
49264737
4927# =============================================================================4738# =============================================================================
4928class S3MAP(S3Method):4739class S3MAP(S3Method):
@@ -5031,4 +4842,3 @@
5031 return page4842 return page
50324843
5033# END =========================================================================4844# END =========================================================================
5034
50354845
=== added file 'modules/test_utils/AddedRole.py'
--- modules/test_utils/AddedRole.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/AddedRole.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,25 @@
1
class AddedRole(object):
    """Adds a role and removes it at the end of a test no matter what happens.

    Use as a context manager: ``with AddedRole(session, role): ...``.
    On exit only the added role is removed; all other roles are kept.
    """
    def __init__(self, session, role):
        self.role = role
        self.session = session

    def __enter__(self):
        # Add the role unless the session already carries it
        roles = self.session.s3.roles
        if self.role not in roles:
            roles.append(self.role)

    def __exit__(self, type, value, traceback):
        # BUGFIX: the original shadowed `role` in the loop and compared
        # `role is not role` (always False), so it emptied the role list
        # instead of restoring everything except the added role.
        # Mutate the existing list in place in case other code holds a
        # reference to session.s3.roles.
        session_s3_roles = self.session.s3.roles
        remaining = [r for r in session_s3_roles if r is not self.role]
        del session_s3_roles[:]
        session_s3_roles.extend(remaining)
026
=== added file 'modules/test_utils/Change.py'
--- modules/test_utils/Change.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/Change.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,25 @@
1
# Sentinel marking "attribute did not exist before the change"
_UNDEFINED = object()

class Change(object):
    """Temporarily sets attributes on *target*, restoring them on exit.

    Attributes that did not exist beforehand are deleted again on exit.
    Not re-entrant: entering twice without exiting raises AssertionError.
    """
    def __init__(self, target, changes):
        self.changes = changes
        self.target = target

    def __enter__(self):
        assert not hasattr(self, "originals")
        self.originals = originals = {}
        # store originals and set new values
        # items() rather than iteritems() keeps this working on both py2 and py3
        for name, value in self.changes.items():
            originals[name] = getattr(self.target, name, _UNDEFINED)
            setattr(self.target, name, value)

    def __exit__(self, type, value, traceback):
        # restore originals
        for name, original in self.originals.items():
            if original is _UNDEFINED:
                # BUGFIX: the original called delattr(self, name), deleting
                # from the context manager instead of the target object.
                delattr(self.target, name)
            else:
                setattr(self.target, name, original)
        # allow the context manager to be re-used
        del self.originals
25
026
=== added file 'modules/test_utils/ExpectSessionWarning.py'
--- modules/test_utils/ExpectSessionWarning.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/ExpectSessionWarning.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,14 @@
1
class ExpectSessionWarning(object):
    """Context manager checking that *warning* lands in session.warning.

    On entry a fresh list is installed as ``session.warning``; on a clean
    exit the expected warning must be among those collected.
    """
    def __init__(self, session, warning):
        self.warning = warning
        self.session = session

    def __enter__(self):
        collected = []
        self.session.warning = collected
        self.warnings = collected

    def __exit__(self, type, value, traceback):
        # Only check when the body finished without raising
        if type is None:
            assert self.warning in self.warnings
015
=== added file 'modules/test_utils/ExpectedException.py'
--- modules/test_utils/ExpectedException.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/ExpectedException.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,13 @@
1
class ExpectedException(object):
    """Context manager asserting the body raises ExceptionClass (or a subclass).

    Raises AssertionError if the body completes without raising; any
    unexpected exception type propagates unchanged.
    """
    def __init__(self, ExceptionClass):
        self.ExceptionClass = ExceptionClass

    def __enter__(self):
        pass

    def __exit__(self, type, value, traceback):
        # BUGFIX: the original called issubclass(None, ...) when nothing was
        # raised (TypeError), and returned a 2-tuple -- always truthy -- so
        # it silently suppressed even unexpected exceptions.
        if type is None:
            raise AssertionError(
                "%s not raised" % self.ExceptionClass.__name__
            )
        # True suppresses the expected exception; False propagates others
        return issubclass(type, self.ExceptionClass)
13
014
=== added file 'modules/test_utils/InsertedRecord.py'
--- modules/test_utils/InsertedRecord.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/InsertedRecord.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,19 @@
1
2from clear_table import clear_table
3
class InsertedRecord(object):
    """Inserts and commits a record and removes it at the end of
    a test no matter what happens.

    NOTE(review): cleanup calls clear_table, which clears the WHOLE
    table, not just the inserted row -- intended for test databases.
    """
    def __init__(self, db, table, data):
        self.db, self.table, self.data = db, table, data

    def __enter__(self):
        self.table.insert(**self.data)
        # commit so the record is visible outside this transaction
        self.db.commit()

    def __exit__(self, type, value, traceback):
        clear_table(self.db, self.table)
020
=== added file 'modules/test_utils/Web2pyNosePlugin.py'
--- modules/test_utils/Web2pyNosePlugin.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/Web2pyNosePlugin.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,106 @@
1
2import nose
3import re
4from itertools import imap
5import unittest
6
7class Web2pyNosePlugin(nose.plugins.base.Plugin):
8 # see: http://somethingaboutorange.com/mrl/projects/nose/0.11.1/plugins/writing.html
9
10 """This plugin is designed to give the web2py environment to the tests.
11 """
12 score = 0
13 # always enable as this plugin can only
14 # be selected by running this script
15 enabled = True
16
17 def __init__(
18 self,
19 application_name,
20 environment,
21 directory_pattern,
22 test_folders
23 ):
24 super(Web2pyNosePlugin, self).__init__()
25 self.application_name = application_name
26 self.environment = environment
27 self.directory_pattern = directory_pattern
28 self.test_folders = test_folders
29
30 def options(self, parser, env):
31 """Register command line options"""
32 pass
33
34 def wantDirectory(self, dirname):
35 return bool(re.search(self.directory_pattern, dirname))
36
37 def wantFile(self, file_name):
38 print file_name
39 return file_name.endswith(".py") and any(
40 imap(file_name.__contains__, self.test_folders)
41 )
42
43 def wantModule(self, module):
44 return False
45
46 def loadTestsFromName(self, file_name, discovered):
47 """Sets up the unit-testing environment.
48
49 This involves loading modules as if by web2py.
50 Also we must have a test database.
51
52 If testing controllers, tests need to set up the request themselves.
53
54 """
55 if file_name.endswith(".py"):
56
57 # Is it possible that the module could load
58 # other code that is using the original db?
59
60 test_globals = self.environment
61
62 module_globals = dict(self.environment)
63 # execfile is used because it doesn't create a module
64 # or load the module from sys.modules if it exists.
65
66 execfile(file_name, module_globals)
67
68 import inspect
69 # we have to return something, otherwise nose
70 # will let others have a go, and they won't pass
71 # in the web2py environment, so we'll get errors
72 tests = []
73
74 for name, thing in module_globals.iteritems():
75 if (
76 # don't bother with globally imported things
77 name not in test_globals \
78 # unless they have been overridden
79 or test_globals[name] is not thing
80 ):
81 if (
82 isinstance(thing, type)
83 and issubclass(thing, unittest.TestCase)
84 ):
85 # look for test methods
86 for member_name in dir(thing):
87 if member_name.startswith("test"):
88 if callable(getattr(thing, member_name)):
89 tests.append(thing(member_name))
90 elif (
91 name.startswith("test")
92 or name.startswith("Test")
93 ):
94 if inspect.isfunction(thing):
95 function = thing
96 function_name = name
97 # things coming from execfile have no module
98 #print file_name, function_name, function.__module__
99 if function.__module__ in ("__main__", None):
100 tests.append(
101 nose.case.FunctionTestCase(function)
102 )
103 return tests
104 else:
105 return []
106
0107
=== modified file 'modules/test_utils/__init__.py'
--- modules/test_utils/__init__.py 2011-08-11 19:25:52 +0000
+++ modules/test_utils/__init__.py 2011-09-06 11:51:25 +0000
@@ -1,2 +1,12 @@
11
2from compare_lines import compare_lines
3\ No newline at end of file2\ No newline at end of file
3from compare_lines import compare_lines
4from clear_table import clear_table
5from find_JSON_format_data_structure import *
6from Web2pyNosePlugin import Web2pyNosePlugin
7from assert_equal import *
8
9from InsertedRecord import *
10from AddedRole import *
11from ExpectedException import *
12from Change import *
13from ExpectSessionWarning import ExpectSessionWarning
414
=== added file 'modules/test_utils/assert_equal.py'
--- modules/test_utils/assert_equal.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/assert_equal.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,60 @@
1
2
3def assert_same_type(expected, actual):
4 assert isinstance(actual, type(expected)), "%s vs. %s" % (type(expected), type(actual))
5
6def assert_equal_sequence(expected, actual):
7 assert len(expected) == len(actual), "length should be %i, not %i:\n%s" % (
8 len(expected), len(actual), actual
9 )
10 for i in range(len(expected)):
11 try:
12 assert_equal(expected[i], actual[i])
13 except AssertionError, assertion_error:
14 raise AssertionError(
15 str(assertion_error)
16 )
17
18def assert_equal_set(expected, actual):
19 missing = expected.difference(actual)
20 assert not missing, "Missing: %s" % ", ".join(missing)
21
22 extra = actual.difference(expected)
23 assert not extra, "Extra: %s" % ", ".join(extra)
24
25def assert_equal_dict(expected, actual):
26 assert_equal_set(
27 expected = set(expected.keys()),
28 actual = set(actual.keys())
29 )
30 for key in expected.iterkeys():
31 try:
32 assert_equal(expected[key], actual[key])
33 except AssertionError, assertion_error:
34 raise AssertionError(
35 "[%s] %s" % (
36 key,
37 str(assertion_error),
38 )
39 )
40
41def assert_equal_value(expected, actual):
42 assert expected == actual, "%s != %s" % (expected, actual)
43
44_compare_procs = {
45 list: assert_equal_sequence,
46 int: assert_equal_value,
47 float: assert_equal_value,
48 str: assert_equal_value,
49 unicode: assert_equal_value, #sequence,
50 dict: assert_equal_dict,
51 set: assert_equal_set,
52}
53
54def assert_equal(expected, actual):
55 assert_same_type(expected, actual)
56 compare_proc = _compare_procs.get(type(expected), assert_equal_value)
57 compare_proc(
58 expected,
59 actual
60 )
061
=== added file 'modules/test_utils/clear_table.py'
--- modules/test_utils/clear_table.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/clear_table.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,4 @@
1
2def clear_table(db, db_table):
3 db(db_table.id).delete()
4 db.commit()
05
=== added file 'modules/test_utils/find_JSON_format_data_structure.py'
--- modules/test_utils/find_JSON_format_data_structure.py 1970-01-01 00:00:00 +0000
+++ modules/test_utils/find_JSON_format_data_structure.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,54 @@
1
2import re
3from json.decoder import JSONDecoder
4
5__all__ = (
6 "not_found",
7 "cannot_parse_JSON",
8 "find_JSON_format_data_structure"
9)
10
11def not_found(name, string):
12 raise Exception(
13 u"Cannot find %s in %s" % (name, string)
14 )
15
16def cannot_parse_JSON(string):
17 raise Exception(
18 u"Cannot parse JSON: '%s'" % string
19 )
20
21def find_JSON_format_data_structure(
22 string,
23 name,
24 found,
25 not_found,
26 cannot_parse_JSON
27):
28 """Finds a named JSON-format data structure in the string.
29
30 The name can be any string.
31 The pattern "name = " will be looked for in the string,
32 and the data structure following it parsed and returned as a python
33 data structure.
34 """
35 try:
36 name_start = string.index(name)
37 except ValueError:
38 not_found(name, string)
39 else:
40 name_length = len(name)
41 name_end = name_start + name_length
42
43 _, remaining = re.Scanner([
44 (r"\s*=\s*", lambda scanner, token: None)
45 ]).scan(
46 string[name_end:]
47 )
48
49 try:
50 data, end_position = JSONDecoder().raw_decode(remaining)
51 except ValueError, value_error:
52 cannot_parse_JSON(remaining)
53 else:
54 found(data)
055
=== modified file 'modules/test_utils/run.py'
--- modules/test_utils/run.py 2011-08-11 19:25:52 +0000
+++ modules/test_utils/run.py 2011-09-06 11:51:25 +0000
@@ -1,5 +1,8 @@
1#!python1#!python
22
3# capture web2py environment before doing anything else
4
5web2py_environment = dict(globals())
36
4__doc__ = """This script is run from the nose command in the 7__doc__ = """This script is run from the nose command in the
5application being tested:8application being tested:
@@ -8,249 +11,81 @@
811
9python2.6 ./applications/eden/tests/nose.py <nose arguments>12python2.6 ./applications/eden/tests/nose.py <nose arguments>
1013
14web2py runs a file which:
151. Sets up a plugin. This plugin registers itself so nose can use it.
162. Runs nose programmatically giving it the plugin
17nose loads the tests via the plugin.
18When the plugin loads the tests, it injects the web2py environment.
19
11"""20"""
1221
13########################################22import sys
14#23
15# web2py runs a file which:24from types import ModuleType
16# 1. Sets up a plugin. This plugin registers itself so nose can use it.
17# 2. Runs nose programmatically giving it the plugin
18#
19# nose loads the tests via the plugin.
20# when the plugin loads the tests, it injects the web2py environment.
21#
22########################################
23
24
25# @ToDo:
26# in particular, haven't checked that db is being replaced with the test_db
27# (work in progress)
28
29# using --import_models is OK for a single test, but for running a suite,
30# we probably need the test_env function to set up an environment
31
32# @ToDo: Provide an ignore_warns mode so that we can tackle ERRORs 1st
33# but FAILs often give us clues that help us fix ERRORs
34# fixing an error might itself cause a failure, but not be spotted until later.
35
36def use_test_db(db):
37 print "Creating test database..."
38 try:
39 test_db = use_test_db.db
40 except AttributeError:
41 # create test database by copying db
42 test_db_name = "sqlite://testing.sqlite"
43 print "Copying db tables into test database..."
44 test_db = DAL(test_db_name) # Name and location of the test DB file
45 # Copy tables!
46 for tablename in db.tables:
47 table_copy = []
48 for data in db[tablename]:
49 table_copy.append(
50 copy.copy(data)
51 )
52 test_db.define_table(tablename, *table_copy)
53 use_test_db.db = test_db
54 return test_db
55
56import nose25import nose
5726import glob
27import os.path
28import os
58import copy29import copy
59def test_env(30from gluon.globals import Request
60 imports,31import unittest
61 application,32
62 controller,33
63 function,34def load_module(application_relative_module_path):
64 folder,
65 globals,
66 create_test_db = use_test_db,
67 _module_root = "applications",
68):
69 """Sets up the unit-testing environment.
70
71 This involves loading modules as if by web2py.
72 Also we must make a test database.
73
74 """
75 from gluon.globals import Request
76 globals["Request"] = Request
77 request = Request()
78 request.application = application
79 request.controller = controller
80 request.function = function
81 request.folder = folder
82
83 globals["request"] = request
84
85 test_db = db#create_test_db(db)
86
87 import os35 import os
88 for import_path, names in imports:36 web2py_relative_module_path = ".".join((
89 #print import_path37 "applications", request.application, application_relative_module_path
90 #module = {"db": test_db}38 ))
91 #module.update(globals)39 imported_module = __import__(web2py_relative_module_path)
92 module = dict(globals)40 for step in web2py_relative_module_path.split(".")[1:]:
93 path_components = [_module_root] + import_path.split(".")41 imported_module = getattr(imported_module, step)
94 file_path = os.path.join(*path_components)+".py"42 return imported_module
95 # execfile is used because it doesn't create a module43
96 # and doesn't load the module if it exists.44web2py_environment["load_module"] = load_module
97 execfile(file_path, module)45web2py_env_module = ModuleType("web2py_env")
98 if names is "*":46web2py_env_module.__dict__.update(web2py_environment)
99 globals.update(module)47sys.modules["web2py_env"] = web2py_env_module
100 else:48
101 for name in names:
102 globals[name] = module[name]
103
104# -----------------------
105
106import os.path
10749
108application_name = request.application50application_name = request.application
10951application_folder_path = os.path.join("applications",application_name)
110model_files_pattern = os.path.join("applications",application_name,"models","*.py")52
111import glob53application = application_name
11254controller = "controller"
113test_env(55function = "function"
114 globals = globals(),56folder = os.path.join(os.getcwd(), "applications", application_name)
115 application = application_name,57
116 controller = "controller",58web2py_environment["Request"] = Request
117 function = "function",59request = Request()
118 folder = "folder",60request.application = application
119 imports = [61request.controller = controller
120 (application_name+".models.%s" % (module_name[len(model_files_pattern)-4:-3]), "*") 62request.function = function
121 for module_name in glob.glob(model_files_pattern)63request.folder = folder
122 ]64
123)65web2py_environment["request"] = request
12466current.request = request
125import sys67
126log = sys.stderr.write68controller_configuration = OrderedDict()
12769for controller_name in ["default"]+glob.glob(
128import unittest70 os.path.join(application_folder_path, "controllers", "*.py")
129from itertools import imap71):
72 controller_configuration[controller_name] = Storage(
73 name_nice = controller_name,
74 description = controller_name,
75 restricted = False,
76 module_type = 0
77 )
78
79current.deployment_settings.modules = controller_configuration
13080
131test_folders = set()81test_folders = set()
13282argv = []
133class Web2pyNosePlugin(nose.plugins.base.Plugin):
134 # see: http://somethingaboutorange.com/mrl/projects/nose/0.11.1/plugins/writing.html
135
136 """This plugin is designed to give the web2py environment to the tests.
137 """
138 score = 0
139 # always enable as this plugin can only
140 # be selected by running this script
141 enabled = True
142
143 def __init__(
144 self,
145 application_name,
146 environment,
147 create_test_db,
148 directory_pattern
149 ):
150 super(Web2pyNosePlugin, self).__init__()
151 self.application_name = application_name
152 self.environment = dict(
153 db = db#create_test_db(db)
154 )
155 self.environment.update(environment)
156 self.directory_pattern = directory_pattern
157
158 def options(self, parser, env):
159 """Register command line options"""
160 return
161 parser.add_option(
162 "--web2py",
163 dest="web2py",
164 action="append",
165 metavar="ATTR",
166 help="Use web2py environment when loading tests"
167 )
168
169 def wantDirectory(self, dirname):
170 return bool(re.search(self.directory_pattern, dirname))
171
172 def wantFile(self, file_name):
173 return file_name.endswith(".py") and any(
174 imap(file_name.__contains__, test_folders)
175 )
176
177 def wantModule(self, module):
178 return False
179
180 def loadTestsFromName(self, file_name, discovered):
181 """Sets up the unit-testing environment.
182
183 This involves loading modules as if by web2py.
184 Also we must have a test database.
185
186 If testing controllers, tests need to set up the request themselves.
187
188 """
189 if file_name.endswith(".py"):
190# log(file_name)
191
192 # assert 0, file_name
193 # stop
194
195 # Is it possible that the module could load
196 # other code that is using the original db?
197
198 test_globals = self.environment
199
200 # execfile is used because it doesn't create a module
201 # and doesn't load the module into sys.modules if it exists.
202 module_globals = dict(self.environment)
203 execfile(file_name, module_globals)
204
205 import inspect
206 # we have to return something, otherwise nose
207 # will let others have a go, and they won't pass
208 # in the web2py environment, so we'll get errors
209 tests = []
210
211 for name, thing in module_globals.iteritems():
212 if (
213 # don't bother with globally imported things
214 name not in test_globals \
215 # unless they have been overridden
216 or test_globals[name] is not thing
217 ):
218 if (
219 isinstance(thing, type)
220 and issubclass(thing, unittest.TestCase)
221 ):
222 # look for test methods
223 for member_name in dir(thing):
224 if member_name.startswith("test"):
225 if callable(getattr(thing, member_name)):
226 tests.append(thing(member_name))
227 elif (
228 name.startswith("test")
229 or name.startswith("Test")
230 ):
231 if inspect.isfunction(thing):
232 function = thing
233 function_name = name
234 # things coming from execfile have no module
235 #print file_name, function_name, function.__module__
236 if function.__module__ is None:
237 tests.append(
238 nose.case.FunctionTestCase(function)
239 )
240 return tests
241 else:
242 return
243
244import re
245
246argv = [
247 #"--verbosity=2",
248 #"--debug=nose"
249]
25083
251# folder in which tests are kept84# folder in which tests are kept
252# non-option arguments (test paths) are made relative to this85# non-option arguments (test paths) are made relative to this
253test_root = os.path.join("applications", application_name, "tests", "unit_tests")86test_root = os.path.join(application_folder_path, "tests", "unit_tests")
87
88current_working_directory = os.getcwd()
25489
255disallowed_options = {}90disallowed_options = {}
256disallowed_options["-w"] = disallowed_options["--where"] = (91disallowed_options["-w"] = disallowed_options["--where"] = (
@@ -275,10 +110,10 @@
275 argv.append(arg)110 argv.append(arg)
276 else:111 else:
277 test_path = arg112 test_path = arg
278 test_fuller_path = os.path.join(test_root, test_path)113 test_folder_fuller_path = os.path.join(test_root, test_path)
279 test_folders.add(test_fuller_path)114 test_folders.add(test_folder_fuller_path)
280 if not os.path.exists(test_fuller_path):115 if not os.path.exists(test_folder_fuller_path):
281 print "\n", test_fuller_path, "not found"116 print "\n", test_folder_fuller_path, "not found"
282 #sys.exit(1)117 #sys.exit(1)
283118
284# test paths in command line aren't passed, just added to test_folders119# test paths in command line aren't passed, just added to test_folders
@@ -293,14 +128,15 @@
293128
294sys.argv[1:] = argv129sys.argv[1:] = argv
295130
131test_utils = local_import("test_utils")
132
296nose.main(133nose.main(
297# seems at least this version of nose ignores passed in argv134# seems at least this version of nose ignores passed in argv
298# argv = argv, 135# argv = argv,
299 addplugins = nose.plugins.PluginManager([136 addplugins = nose.plugins.PluginManager([
300 Web2pyNosePlugin(137 test_utils.Web2pyNosePlugin(
301 application_name,138 application_name,
302 globals(),139 web2py_environment,
303 use_test_db,
304 re.compile(140 re.compile(
305 re.escape(os.path.sep).join(141 re.escape(os.path.sep).join(
306 (142 (
@@ -311,7 +147,8 @@
311 "[^","]*)*)?)?)?$"147 "[^","]*)*)?)?)?$"
312 )148 )
313 )149 )
314 )150 ),
151 test_folders
315 )152 )
316 ])153 ])
317)154)
318155
=== added file 'private/prepopulate/default/tasks.cfg'
--- private/prepopulate/default/tasks.cfg 1970-01-01 00:00:00 +0000
+++ private/prepopulate/default/tasks.cfg 2011-09-06 11:51:25 +0000
@@ -0,0 +1,18 @@
1##########################################################################
2# Add a list of csv files to import into the system
3# the list of import files is a comma-separated list as follows:
4# "prefix","tablename","csv file name","stylesheet"
5#
6# The csv file is assumed to be in the same directory as this file
7# The style sheet is assumed to be in either of the following directories:
8# static/format/s3csv/"prefix"/
9# static/format/s3csv/
10#
11# For details on how to import data into the system see the following:
12# zzz_1st_run
13# s3Tools::S3BulkImporter
14##########################################################################
15"supply","catalog_item","DefaultItems.csv","supply_items.xsl"
16"supply","catalog_item","StandardItems.csv","supply_items.xsl"
17"hrm","skill","DefaultSkillList.csv","skill.xsl"
18"hrm","competency_rating",DefaultSkillCompetency.csv,competency_rating.xsl
0\ No newline at end of file19\ No newline at end of file
120
=== removed file 'private/prepopulate/default/tasks.cfg'
--- private/prepopulate/default/tasks.cfg 2011-08-17 15:13:43 +0000
+++ private/prepopulate/default/tasks.cfg 1970-01-01 00:00:00 +0000
@@ -1,18 +0,0 @@
1##########################################################################
2# Add a list of csv file to import into the system
3# the list of import file sis a comma separated list as follows:
4# "prefix","tablename","csv file name","stylesheet"
5#
6# The csv file is assumed to be in the same directory as this file
7# The style sheet is assumed to be in either of the following directories:
8# static/format/s3csv/"prefix"/
9# static/format/s3csv/
10#
11# For details on how to import data into the system see the following:
12# zzz_1st_run
13# s3Tools::S3BulkImporter
14##########################################################################
15"supply","catalog_item","DefaultItems.csv","supply_items.xsl"
16"supply","catalog_item","StandardItems.csv","supply_items.xsl"
17"hrm","skill","DefaultSkillList.csv","skill.xsl"
18"hrm","competency_rating",DefaultSkillCompetency.csv,competency_rating.xsl
190
=== modified file 'static/scripts/S3/s3.gis.climate.js'
--- static/scripts/S3/s3.gis.climate.js 2011-06-15 09:47:54 +0000
+++ static/scripts/S3/s3.gis.climate.js 2011-09-06 11:51:25 +0000
@@ -9,154 +9,372 @@
9 }9 }
10}10}
1111
12
13ClimateDataMapPlugin = function (config) {12ClimateDataMapPlugin = function (config) {
14 var self = this // so no this-clobbering13 var plugin = this // let's be explicit!
15 self.data_type_option_names = config.data_type_option_names14 plugin.data_type_option_names = config.data_type_option_names
16 self.parameter_names = config.parameter_names15 plugin.parameter_names = config.parameter_names
17 self.projected_option_type_names = config.projected_option_type_names16 plugin.year_min = config.year_min
18 self.year_min = config.year_min 17 plugin.year_max = config.year_max
19 self.year_max = config.year_max18
2019 plugin.data_type_label = config.data_type_label
21 self.data_type_label = config.data_type_label20 plugin.overlay_data_URL = config.overlay_data_URL
22 self.projected_option_type_label = config.projected_option_type_label21 plugin.chart_URL = config.chart_URL
2322 delete config
24 self.setup = function () { 23
25 var graphic = new OpenLayers.Layer.Image(24 plugin.setup = function () {
26 'Test Data',25 var overlay_layer = plugin.overlay_layer = new OpenLayers.Layer.Vector(
27 '/eden/climate/climate_image_overlay',26 'Climate data map overlay',
28 new OpenLayers.Bounds(8900000, 3020000, 9850000, 3580000),27 {
29// new OpenLayers.Bounds(-180, -88.759, 180, 88.759),28 isBaseLayer:false,
30 new OpenLayers.Size(249, 139),29 }
31 {30 );
32 // numZoomLevels: 3,31 map.addLayer(overlay_layer);
33 isBaseLayer:false,32
34 opacity: 0.5,33 // selection
35 transparent:true34 OpenLayers.Feature.Vector.style['default']['strokeWidth'] = '2'
36 }35 var selectCtrl = new OpenLayers.Control.SelectFeature(
37 );36 overlay_layer,
38 graphic.events.on({37 {
39 loadstart: function() {38 clickout: true,
40 OpenLayers.Console.log("loadstart");39 toggle: false,
41 },40 multiple: false,
42 loadend: function() {41 hover: false,
43 OpenLayers.Console.log("loadend");42 toggleKey: 'altKey',
44 }43 multipleKey: 'shiftKey',
45 });44 box: true,
46 map.addLayer(graphic);45 onSelect: function (feature) {
46 feature.style.strokeColor = 'black'
47 feature.style.strokeDashstyle = 'dash'
48 overlay_layer.drawFeature(feature)
49 },
50 onUnselect: function (feature) {
51 feature.style.strokeColor = 'none'
52 overlay_layer.drawFeature(feature)
53 },
54 }
55 );
56
57 map.addControl(selectCtrl);
58
59 selectCtrl.activate();
47 }60 }
48 self.addToMapWindow = function (items) {61 plugin.addToMapWindow = function (items) {
49 function toggle_projected_options() {62 var combo_box_size = {
50 $('#projected-options').toggle(63 width: 120,
51 $('#id_Projected').attr('checked') == 'checked'64 heigth:25
65 }
66
67 function make_combo_box(
68 data,
69 fieldLabel,
70 hiddenName
71 ) {
72 var options = []
73 each(
74 data,
75 function (option) {
76 options.push([option, option])
77 }
52 )78 )
53 }79 var combo_box = new Ext.form.ComboBox({
54 var climate_data_type_options = [];80 fieldLabel: fieldLabel,
55 each(81 hiddenName: hiddenName,
56 self.data_type_option_names,82 store: new Ext.data.SimpleStore({
57 function (option_name) {83 fields: ['name', 'option'],
58 var radio_button = new Ext.form.Radio({84 data: options
59 name: "data-type",
60 id: "id_%s" % option_name,
61 boxLabel: option_name,
62 checked: option_name == self.data_type_option_names[0],
63 })
64 radio_button.on({
65 change: toggle_projected_options
66 })
67 climate_data_type_options.push(radio_button)
68 }
69 )
70 var projected_options = [];
71 each(
72 self.projected_option_type_names,
73 function (projected_option_type_name) {
74 projected_options.push(
75 new Ext.form.Radio({
76 name: "projected-option-type",
77 id: "id_%s" % projected_option_type_name,
78 boxLabel: projected_option_type_name,
79 })
80 )
81 }
82 )
83 var projected_options_widget = new Ext.form.FieldSet({
84 title: self.projected_option_type_label,
85 items: [
86 new Ext.form.CheckboxGroup({
87 items: projected_options,
88 xtype: 'checkboxgroup',
89 columns: 1
90 })
91 ]
92 })
93
94 var climate_data_type_options = new Ext.form.FieldSet({
95 title: self.data_type_label,
96 items: [
97 new Ext.form.RadioGroup({
98 items: climate_data_type_options,
99 columns: 1,
100 })
101 ]
102 })
103
104 var parameter_options = [];
105 each(
106 self.parameter_names,
107 function (parameter_name) {
108 var checkbox = new Ext.form.Checkbox({
109 name: parameter_name,
110 id: "id_%s" % parameter_name,
111 boxLabel: parameter_name,
112 })
113 parameter_options.push(checkbox)
114 }
115 )
116
117 var parameters_widget = new Ext.form.FieldSet({
118 title: "Parameters",
119 items: [
120 new Ext.form.CheckboxGroup({
121 items: parameter_options,
122 xtype: 'checkboxgroup',
123 columns: 1
124 })
125 ]
126 })
127
128 var period_widget = new Ext.form.FieldSet({
129 title: "Period",
130 items: [
131 new Ext.form.NumberField({
132 fieldLabel: "From",
133 minValue: self.year_min,
134 maxValue: self.year_max,
135 value: self.year_min
136 }),85 }),
137 new Ext.form.NumberField({86 displayField: 'name',
138 fieldLabel: "To",87 typeAhead: true,
139 minValue: self.year_min,88 mode: 'local',
140 maxValue: self.year_max,89 triggerAction: 'all',
141 value: self.year_max90 emptyText:'Choose...',
142 })91 selectOnFocus:true
143 ]92 })
144 })93 combo_box.setSize(combo_box_size)
94 return combo_box
95 }
96 var data_type_combo_box = make_combo_box(
97 plugin.data_type_option_names,
98 'Data type',
99 'data_type'
100 )
101
102 var variable_combo_box = make_combo_box(
103 plugin.parameter_names,
104 'Variable',
105 'parameter'
106 )
107
108 var statistic_combo_box = make_combo_box(
109 ['Minimum','Maximum','Average'],
110 'Aggregate values',
111 'statistic'
112 )
113
145 var climate_data_panel = new Ext.FormPanel({114 var climate_data_panel = new Ext.FormPanel({
146 id: 'climate_data_panel',115 id: 'climate_data_panel',
147 title: 'Climate data',116 title: 'Climate data map overlay',
148 collapsible: true,117 collapsible: true,
149 collapseMode: 'mini',118 collapseMode: 'mini',
150 items: [{119 items: [{
151 region: 'center',120 region: 'center',
152 items: [121 items: [
153 climate_data_type_options,122 new Ext.form.FieldSet({
154 projected_options_widget,123 title: 'Data set',
155 parameters_widget,124 items: [
156 period_widget125 data_type_combo_box,
126 variable_combo_box
127 ]
128 }),
129 new Ext.form.FieldSet({
130 title: 'Period',
131 items: [
132 new Ext.form.NumberField({
133 fieldLabel: 'From',
134 name: 'from_date',
135 minValue: plugin.year_min,
136 maxValue: plugin.year_max,
137 value: plugin.year_min
138 }),
139 new Ext.form.NumberField({
140 fieldLabel: 'To',
141 name: 'to_date',
142 minValue: plugin.year_min,
143 maxValue: plugin.year_max,
144 value: plugin.year_max,
145 size: combo_box_size
146 })
147 ]
148 }),
149 new Ext.form.FieldSet({
150 title: 'Map overlay colours',
151 items: [
152 statistic_combo_box,
153 ]
154 })
157 ]155 ]
158 }]156 }]
159 });157 });
158
159 var update_map_layer_button = new Ext.Button({
160 text: 'Update map layer',
161 disabled: true,
162 handler: function() {
163 plugin.overlay_layer.destroyFeatures()
164
165 // request new features
166 var form_values = climate_data_panel.getForm().getValues()
167
168 // add new features
169 $.ajax({
170 url: plugin.overlay_data_URL,
171 data: {
172 data_type: form_values.data_type,
173 statistic: form_values.statistic,
174 parameter: form_values.parameter,
175 from_date: form_values.from_date,
176 to_date: form_values.to_date
177 },
178 success: function(feature_data, status_code) {
179 function Vector(geometry, attributes, style) {
180 style.strokeColor= 'none'
181 style.fillOpacity= 0.8
182 style.strokeWidth = 1
183
184 return new OpenLayers.Feature.Vector(
185 geometry, attributes, style
186 )
187 }
188 function Polygon(components) {
189 return new OpenLayers.Geometry.Polygon(components)
190 }
191 function Point(lon, lat) {
192 var point = new OpenLayers.Geometry.Point(lat, lon)
193 return point.transform(
194 S3.gis.proj4326,
195 S3.gis.projection_current
196 )
197 }
198 function LinearRing(point_list) {
199 point_list.push(point_list[0])
200 return new OpenLayers.Geometry.LinearRing(point_list)
201 }
202 eval('var data = '+feature_data)
203 $('#id_key_min_value').html(data.min)
204 $('#id_key_max_value').html(data.max)
205 plugin.overlay_layer.addFeatures(data.features)
206 }
207 });
208 }
209 });
210
211 function enable_update_layer_button_if_form_complete(
212 box, record, index
213 ) {
214 if (
215 !!data_type_combo_box.getValue() &&
216 !!variable_combo_box.getValue() &&
217 !!statistic_combo_box.getValue()
218 ) {
219 update_map_layer_button.enable()
220 }
221 }
222 data_type_combo_box.on(
223 'change',
224 enable_update_layer_button_if_form_complete
225 );
226 variable_combo_box.on(
227 'change',
228 enable_update_layer_button_if_form_complete
229 );
230 statistic_combo_box.on(
231 'change',
232 enable_update_layer_button_if_form_complete
233 );
234 climate_data_panel.addButton(update_map_layer_button)
235
236 var show_chart_button = new Ext.Button({
237 text: 'Show chart',
238 disabled: true,
239 handler: function() {
240 // create URL
241 var place_ids = []
242 each(
243 plugin.overlay_layer.selectedFeatures,
244 function (feature) {
245 place_ids.push(feature.data.id)
246 }
247 )
248 var form_values = climate_data_panel.getForm().getValues(),
249 data_type = form_values.data_type,
250 parameter = form_values.parameter,
251 from_date = form_values.from_date,
252 to_date = form_values.to_date,
253 place_ids = place_ids;
254
255 var spec = JSON.stringify({
256 data_type: data_type,
257 parameter: parameter,
258 from_date: from_date,
259 to_date: to_date,
260 place_ids: place_ids
261 })
262
263 var chart_name = [
264 data_type, parameter,
265 'from', from_date,
266 'to', to_date,
267 'for', (
268 place_ids.length < 3?
269 'places: '+ place_ids:
270 place_ids.length+' places'
271 )
272 ].join(' ')
273
274 // get hold of a chart manager instance
275 if (!plugin.chart_window) {
276 var chart_window = plugin.chart_window = window.open(
277 'climate/chart_popup.html',
278 'chart',
279 'width=660,height=600,toolbar=0,resizable=0'
280 )
281 chart_window.onload = function () {
282 chart_window.chart_manager = new chart_window.ChartManager(plugin.chart_URL)
283 chart_window.chart_manager.addChartSpec(spec, chart_name)
284 }
285 chart_window.onbeforeunload = function () {
286 delete plugin.chart_window
287 }
288 } else {
289 // some duplication here:
290 plugin.chart_window. chart_manager.addChartSpec(spec, chart_name)
291 }
292
293 }
294 });
295
296
297 function enable_show_chart_button_if_data_and_variable_selected(
298 box, record, index
299 ) {
300 if (
301 !!data_type_combo_box.getValue() &&
302 !!variable_combo_box.getValue()
303 ) {
304 show_chart_button.enable()
305 }
306 }
307
308 data_type_combo_box.on(
309 'change',
310 enable_show_chart_button_if_data_and_variable_selected
311 );
312
313 variable_combo_box.on(
314 'change',
315 enable_show_chart_button_if_data_and_variable_selected
316 );
317
318
319
320 climate_data_panel.addButton(show_chart_button)
321
160 items.push(climate_data_panel)322 items.push(climate_data_panel)
323
324 var key_panel = new Ext.Panel({
325 id: 'key_panel',
326 title: 'Key',
327 collapsible: true,
328 collapseMode: 'mini',
329 items: [
330 {
331 layout: {
332 type: 'table',
333 columns: 3,
334 },
335 defaults: {
336 width: '100%',
337 height: 20,
338 style: 'margin: 10px'
339 },
340 items: [
341 {
342 tag: 'span',
343 id: 'id_key_min_value',
344 style: 'margin: 5px; text-align: center;',
345 border: false,
346 items: [
347 {
348 html:'Min',
349 border: false
350 }
351 ]
352 },
353 new Ext.BoxComponent({
354 autoEl: {
355 tag: 'img',
356 width: 128,
357 height: 15,
358 src: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAAABCAYAAAAW0qa2AAAAc0lEQVQoz42QSw6AIAxEX40JEO9/VlBjxg3gD9TFpG06aTrPJjYlYBsNvCAALvce8JZrVuA6X6Snv+kpdwXBwAlsBiIoAQksV536Mr/te3rxDay4r2iNmAFwBcsdVwdfRagDwbC031M8op5j96L8RVEVYQf3hFgEX0OMvQAAAABJRU5ErkJggg=='
359 }
360 }),
361 {
362 tag: 'span',
363 id: 'id_key_max_value',
364 style: 'margin: 5px; text-align: center',
365 border: false,
366 items: [
367 {
368 html:'Max',
369 border: false
370 }
371 ]
372 }
373 ]
374 }
375 ]
376 })
377
378 items.push(key_panel)
161 }379 }
162}380}
163381
=== added file 'tests/__init__.py'
=== added directory 'tests/climate'
=== added file 'tests/climate/__init__.py'
--- tests/climate/__init__.py 1970-01-01 00:00:00 +0000
+++ tests/climate/__init__.py 2011-09-06 11:51:25 +0000
@@ -0,0 +1,101 @@
1
2ClimateDataPortal = local_import('ClimateDataPortal')
3
4def clear_tables():
5 ClimateDataPortal.place.truncate()
6 ClimateDataPortal.rainfall_mm.truncate()
7 ClimateDataPortal.temperature_celsius.truncate()
8 db.commit()
9#clear_tables()
10
11def frange(start, end, inc=1.0):
12 value = start
13 i = 0
The diff has been truncated for viewing.