update to address some API deprecations
[gae-samples.git] / search / product_search_python / admin_handlers.py
blobcd940e3a253361ee9cf61c2afbf1f9723fb8ffa7
1 #!/usr/bin/env python
3 # Copyright 2012 Google Inc.
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
17 """ Contains the admin request handlers for the app (those that require
18 administrative access).
19 """
21 import csv
22 import logging
23 import os
24 import urllib
25 import uuid
27 from base_handler import BaseHandler
28 import categories
29 import config
30 import docs
31 import errors
32 import models
33 import stores
34 import utils
36 from google.appengine.api import users
37 from google.appengine.ext.deferred import defer
38 from google.appengine.ext import ndb
39 from google.appengine.api import search
def reinitAll(sample_data=True):
  """
  Deletes all product entities and documents, essentially resetting the app
  state, then loads in static sample data if requested. Hardwired for the
  expected product types in the sample data.
  (Re)loads store location data from stores.py as well.
  This function is intended to be run 'offline' (e.g., via a Task Queue task).
  As an extension to this functionality, the channel ID could be used to notify
  when done.

  Args:
    sample_data: if True (the default), reload the static sample data from
        the csv files named in the app config after the wipe.
  """

  # delete all the product and review entities
  review_keys = models.Review.query().fetch(keys_only=True)
  ndb.delete_multi(review_keys)
  prod_keys = models.Product.query().fetch(keys_only=True)
  ndb.delete_multi(prod_keys)
  # delete all the associated product documents in the doc and
  # store indexes
  docs.Product.deleteAllInProductIndex()
  docs.Store.deleteAllInIndex()
  # load in sample data if indicated
  if sample_data:
    logging.info('Loading product sample data')
    # Load from csv sample files.
    # The field lists below are hardwired to the format of the sample data
    # files for the two example product types ('books' and
    # 'hd televisions') -- see categories.py
    _importSampleFile(
        config.SAMPLE_DATA_BOOKS,
        ['pid', 'name', 'category', 'price',
         'publisher', 'title', 'pages', 'author',
         'description', 'isbn'])
    _importSampleFile(
        config.SAMPLE_DATA_TVS,
        ['pid', 'name', 'category', 'price',
         'size', 'brand', 'tv_type',
         'description'])

  # next create docs from store location info
  loadStoreLocationData()

  logging.info('Re-initialization complete.')


def _importSampleFile(filename, fieldnames):
  """Import one sample data csv file from the 'data' directory, ensuring the
  file handle is closed when the import finishes (the original code leaked
  the handle)."""
  datafile = os.path.join('data', filename)
  with open(datafile, 'r') as f:
    importData(csv.DictReader(f, fieldnames))
def loadStoreLocationData():
  """Create and index a search document for each store location listed in
  stores.stores, placing them in the store location index.

  Search API errors are logged but otherwise swallowed, so one bad document
  does not abort the rest of the load.
  """
  slocs = stores.stores
  for s in slocs:
    logging.info("s: %s", s)
    # Each store tuple provides (doc_id, name, address, (lat, lon)) --
    # see stores.py for the exact layout.
    geopoint = search.GeoPoint(s[3][0], s[3][1])
    fields = [
        search.TextField(name=docs.Store.STORE_NAME, value=s[1]),
        search.TextField(name=docs.Store.STORE_ADDRESS, value=s[2]),
        search.GeoField(name=docs.Store.STORE_LOCATION, value=geopoint)
        ]
    d = search.Document(doc_id=s[0], fields=fields)
    try:
      search.Index(config.STORE_INDEX_NAME).put(d)
    except search.Error:
      logging.exception("Error adding document:")
def importData(reader):
  """Index the rows yielded by the csv reader iterator, in batches of the
  size configured in the config file. The batch size is clamped so it is
  never below 1 nor above 100 rows/products per batch."""
  MAX_BATCH_SIZE = 100
  # ensure the batch size in the config file is not over the max or < 1.
  batchsize = utils.intClamp(config.IMPORT_BATCH_SIZE, 1, MAX_BATCH_SIZE)
  logging.debug('batchsize: %s', batchsize)
  batch = []
  for row in reader:
    batch.append(row)
    if len(batch) == batchsize:
      # a full batch has accumulated: index it and start a fresh one
      docs.Product.buildProductBatch(batch)
      batch = []
  # index any remaining partial batch
  if batch:
    docs.Product.buildProductBatch(batch)
class AdminHandler(BaseHandler):
  """Displays the admin page and dispatches the admin actions."""

  def buildAdminPage(self, notification=None):
    """Render the admin page template.

    Args:
      notification: optional status string to surface on the page.
    """
    # If necessary, build the app's product categories now. This is done only
    # if there are no Category entities in the datastore.
    models.Category.buildAllCategories()
    tdict = {
        'sampleb': config.SAMPLE_DATA_BOOKS,
        'samplet': config.SAMPLE_DATA_TVS,
        'update_sample': config.DEMO_UPDATE_BOOKS_DATA}
    if notification:
      tdict['notification'] = notification
    self.render_template('admin.html', tdict)

  @BaseHandler.logged_in
  def get(self):
    action = self.request.get('action')
    if action == 'reinit':
      # reinitialise the app data to the sample data, offline via a
      # deferred task
      defer(reinitAll)
      self.buildAdminPage(notification="Reinitialization performed.")
    elif action == 'demo_update':
      # update the sample data, from (hardwired) book update
      # data. Demonstrates updating some existing products, and adding some
      # new ones.
      logging.info('Loading product sample update data')
      # The field list below is hardwired to the known format of the sample
      # data file.
      datafile = os.path.join('data', config.DEMO_UPDATE_BOOKS_DATA)
      # use 'with' so the file is closed when the import finishes (the
      # original code leaked the handle)
      with open(datafile, 'r') as f:
        reader = csv.DictReader(
            f,
            ['pid', 'name', 'category', 'price',
             'publisher', 'title', 'pages', 'author',
             'description', 'isbn'])
        for row in reader:
          docs.Product.buildProduct(row)
      self.buildAdminPage(notification="Demo update performed.")
    elif action == 'update_ratings':
      self.update_ratings()
      self.buildAdminPage(notification="Ratings update performed.")
    else:
      self.buildAdminPage()

  def update_ratings(self):
    """Find the products that have had an average ratings change, and need
    their associated documents updated (re-indexed) to reflect that change;
    and re-index those docs in batch. There will only
    be such products if config.BATCH_RATINGS_UPDATE is True; otherwise the
    associated documents will be updated right away."""
    # get the pids of the products that need review info updated in their
    # associated documents.
    pkeys = models.Product.query(
        models.Product.needs_review_reindex == True).fetch(keys_only=True)
    # re-index these docs in batch
    models.Product.updateProdDocsWithNewRating(pkeys)
class DeleteProductHandler(BaseHandler):
  """Remove data for the product with the given pid, including that product's
  reviews and its associated indexed document."""

  @BaseHandler.logged_in
  def post(self):
    pid = self.request.get('pid')
    if not pid:  # this should not be reached
      error_msg = 'There was a problem: no product id given.'
      logging.error(error_msg)
      self.render_template(
          'notification.html',
          {'title': 'Error', 'msg': error_msg,
           'goto_url': '/', 'linktext': 'Go to product search page.'})
      return

    # Delete the product entity within a transaction, and define
    # transactional tasks for deleting the product's reviews and its
    # associated document.  These tasks will only be run if the transaction
    # successfully commits.
    def _delete_product():
      prod = models.Product.get_by_id(pid)
      if not prod:
        return
      prod.key.delete()
      defer(models.Review.deleteReviews, prod.key.id(), _transactional=True)
      defer(
          docs.Product.removeProductDocByPid,
          prod.key.id(), _transactional=True)

    ndb.transaction(_delete_product)
    # indicate success
    success_msg = (
        'The product with product id %s has been ' +
        'successfully removed.') % (pid,)
    self.render_template(
        'notification.html',
        {'title': 'Product Removed', 'msg': success_msg,
         'goto_url': '/', 'linktext': 'Go to product search page.'})
class CreateProductHandler(BaseHandler):
  """Handler to create a new product: this constitutes both a product entity
  and its associated indexed document."""

  def parseParams(self):
    """Filter the param set to the expected params.

    Builds a dict of default values -- either from the product's existing
    indexed document (when a known pid is given) or from the 'core' fields
    plus every category-specific field -- then overrides each default with
    the corresponding request param when one was sent.

    Returns:
      A dict mapping expected param names to their values.
    """
    pid = self.request.get('pid')
    doc = docs.Product.getDocFromPid(pid)
    params = {}
    if doc:  # populate default params from the doc
      for f in doc.fields:
        params[f.name] = f.value
    else:
      # start with the 'core' fields
      params = {
          'pid': uuid.uuid4().hex,  # auto-generate default UID
          'name': '',
          'description': '',
          'category': '',
          'price': ''}
      # add the fields specific to the categories
      for _, cdict in categories.product_dict.iteritems():
        for elt in cdict.keys():
          params[elt] = ''
    for k, v in params.iteritems():
      # Process the request params. Possibly replace default values.
      params[k] = self.request.get(k, v)
    return params

  @BaseHandler.logged_in
  def get(self):
    params = self.parseParams()
    self.render_template('create_product.html', params)

  @BaseHandler.logged_in
  def post(self):
    self.createProduct(self.parseParams())

  def createProduct(self, params):
    """Create a product entity and associated document from the given params
    dict, then redirect to the new product's page; on failure re-render the
    creation form with the error message."""
    try:
      product = docs.Product.buildProduct(params)
      self.redirect(
          '/product?' + urllib.urlencode(
              {'pid': product.pid, 'pname': params['name'],
               'category': product.category}))
    except errors.Error as e:
      logging.exception('Error:')
      params['error_message'] = e.error_message
      self.render_template('create_product.html', params)