1 # ##### BEGIN GPL LICENSE BLOCK #####
3 # This program is free software; you can redistribute it and/or
4 # modify it under the terms of the GNU General Public License
5 # as published by the Free Software Foundation; either version 2
6 # of the License, or (at your option) any later version.
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
13 # You should have received a copy of the GNU General Public License
14 # along with this program; if not, write to the Free Software Foundation,
15 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 # ##### END GPL LICENSE BLOCK #####
21 from importlib
import reload
25 categories
= reload(categories
)
27 bkit_oauth
= reload(bkit_oauth
)
28 version_checker
= reload(version_checker
)
29 tasks_queue
= reload(tasks_queue
)
30 rerequests
= reload(rerequests
)
32 from blenderkit
import paths
, utils
, categories
, ui
, bkit_oauth
, version_checker
, tasks_queue
, rerequests
35 from bpy
.app
.handlers
import persistent
37 from bpy
.props
import ( # TODO only keep the ones actually used when cleaning
46 from bpy
.types
import (
54 import requests
, os
, random
def check_errors(rdata):
    """Inspect a server response dict for authentication errors.

    Returns a tuple (ok, error_message): ok is True when no auth problem
    was detected, otherwise False together with a human-readable reason.
    """
    if rdata.get('statusCode') == 401:
        if rdata.get('detail') == 'Invalid token.':
            user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
            if user_preferences.api_key != '':
                if user_preferences.enable_oauth:
                    # try to refresh the OAuth token in the background
                    bkit_oauth.refresh_token_thread()
                return False, rdata.get('detail')
            return False, 'Missing or wrong api_key in addon preferences'
    # fix: the success return was missing, so callers received None instead
    # of a (bool, str) tuple on the happy path.
    return True, ''
# Active thumbnail download threads, keyed by target image filepath, so the
# search code can avoid duplicate downloads and limit concurrency.
thumb_sml_download_threads = {}
thumb_full_download_threads = {}
def refresh_token_timer():
    ''' this timer gets run every time the token needs refresh. It refreshes tokens and also categories.'''
    utils.p('refresh timer')
    user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
    # NOTE(review): the docstring mentions token refresh, but no refresh call is
    # visible here -- a line may have been lost from this copy; confirm upstream.
    categories.load_categories()
    # re-run shortly before the api key expires, but never more than once per hour
    return max(3600, user_preferences.api_key_life - 3600)
def scene_load(context):
    """Scene load handler: reload categories and ensure the token-refresh timer is registered."""
    wm = bpy.context.window_manager  # NOTE(review): unused below -- possibly used by lines lost from this copy
    # following doesn't necessarily happen if version isn't checked yet or similar, first run.
    # wm['bkit_update'] = version_checker.compare_versions(blenderkit)
    categories.load_categories()
    if not bpy.app.timers.is_registered(refresh_token_timer):
        # first_interval=36000: first token refresh only after 10 hours
        bpy.app.timers.register(refresh_token_timer, persistent=True, first_interval=36000)
def fetch_server_data():
    ''' download categories and addon version'''
    # skip network work when Blender runs headless (renders, scripts)
    if not bpy.app.background:
        user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
        url = paths.BLENDERKIT_ADDON_URL  # NOTE(review): unused below -- the version-check request may have been lost from this copy
        api_key = user_preferences.api_key
        # Only refresh new type of tokens(by length), and only one hour before the token timeouts.
        if user_preferences.enable_oauth and \
                len(user_preferences.api_key) < 38 and \
                user_preferences.api_key_timeout < time.time() + 3600:
            bkit_oauth.refresh_token_thread()
        categories.fetch_categories_thread(api_key)
121 @bpy.app
.handlers
.persistent
122 def timer_update(): # TODO might get moved to handle all blenderkit stuff.
124 global search_threads
125 # don't do anything while dragging - this could switch asset type during drag, and make results list length different,
126 # causing a lot of throuble literally.
127 if len(search_threads
) == 0 or bpy
.context
.scene
.blenderkitUI
.dragging
:
129 for thread
in search_threads
: # TODO this doesn't check all processes when one gets removed,
130 # but most of the time only one is running anyway
131 if not thread
[0].is_alive():
132 search_threads
.remove(thread
) #
133 icons_dir
= thread
[1]
134 scene
= bpy
.context
.scene
135 # these 2 lines should update the previews enum and set the first result as active.
136 s
= bpy
.context
.scene
137 asset_type
= thread
[2]
138 if asset_type
== 'model':
139 props
= scene
.blenderkit_models
140 json_filepath
= os
.path
.join(icons_dir
, 'model_searchresult.json')
141 search_name
= 'bkit model search'
142 if asset_type
== 'scene':
143 props
= scene
.blenderkit_scene
144 json_filepath
= os
.path
.join(icons_dir
, 'scene_searchresult.json')
145 search_name
= 'bkit scene search'
146 if asset_type
== 'material':
147 props
= scene
.blenderkit_mat
148 json_filepath
= os
.path
.join(icons_dir
, 'material_searchresult.json')
149 search_name
= 'bkit material search'
150 if asset_type
== 'brush':
151 props
= scene
.blenderkit_brush
152 json_filepath
= os
.path
.join(icons_dir
, 'brush_searchresult.json')
153 search_name
= 'bkit brush search'
159 props
.report
= str(reports
)
161 with
open(json_filepath
, 'r') as data_file
:
162 rdata
= json
.load(data_file
)
165 ok
, error
= check_errors(rdata
)
168 for r
in rdata
['results']:
169 # TODO remove this fix when filesSize is fixed.
170 # this is a temporary fix for too big numbers from the server.
172 r
['filesSize'] = int(r
['filesSize'] / 1024)
174 utils
.p('asset with no files-size')
175 if r
['assetType'] == asset_type
:
176 if len(r
['files']) > 0:
180 durl
, tname
= None, None
182 if f
['fileType'] == 'thumbnail':
183 tname
= paths
.extract_filename_from_url(f
['fileThumbnailLarge'])
184 small_tname
= paths
.extract_filename_from_url(f
['fileThumbnail'])
185 allthumbs
.append(tname
) # TODO just first thumb is used now.
188 for i
, t
in enumerate(allthumbs
):
189 tdict
['thumbnail_%i'] = t
190 if f
['fileType'] == 'blend':
191 durl
= f
['downloadUrl'].split('?')[0]
192 # fname = paths.extract_filename_from_url(f['filePath'])
195 tooltip
= generate_tooltip(r
)
196 asset_data
= {'thumbnail': tname
,
197 'thumbnail_small': small_tname
,
198 # 'thumbnails':allthumbs,
199 'download_url': durl
,
201 'asset_base_id': r
['assetBaseId'],
203 'asset_type': r
['assetType'],
206 'can_download': r
.get('canDownload', True),
207 'verification_status': r
['verificationStatus'],
208 'author_id': str(r
['author']['id'])
209 # 'author': r['author']['firstName'] + ' ' + r['author']['lastName']
210 # 'description': r['description'],
211 # 'author': r['description'],
213 asset_data
['downloaded'] = 0
215 # parse extra params needed for blender here
216 params
= params_to_dict(r
['parameters'])
218 if asset_type
== 'model':
219 if params
.get('boundBoxMinX') != None:
222 float(params
['boundBoxMinX']),
223 float(params
['boundBoxMinY']),
224 float(params
['boundBoxMinZ'])),
226 float(params
['boundBoxMaxX']),
227 float(params
['boundBoxMaxY']),
228 float(params
['boundBoxMaxZ']))
233 'bbox_min': (-.5, -.5, 0),
234 'bbox_max': (.5, .5, 1)
236 asset_data
.update(bbox
)
237 if asset_type
== 'material':
238 asset_data
['texture_size_meters'] = params
.get('textureSizeMeters', 1.0)
240 asset_data
.update(tdict
)
241 if r
['assetBaseId'] in scene
.get('assets used', {}).keys():
242 asset_data
['downloaded'] = 100
244 result_field
.append(asset_data
)
246 # results = rdata['results']
247 s
[search_name
] = result_field
248 s
['search results'] = result_field
249 s
[search_name
+ ' orig'] = rdata
250 s
['search results orig'] = rdata
252 ui_props
= bpy
.context
.scene
.blenderkitUI
253 if len(result_field
) < ui_props
.scrolloffset
:
254 ui_props
.scrolloffset
= 0
255 props
.is_searching
= False
256 props
.search_error
= False
257 props
.report
= 'Open assetbar to see %i results. ' % len(s
['search results'])
258 if len(s
['search results']) == 0:
259 tasks_queue
.add_task((ui
.add_report
, ('No matching results found.',)))
262 # if rdata['next'] != None:
263 # search(False, get_next = True)
265 print('error', error
)
267 props
.search_error
= True
269 # print('finished search thread')
270 mt('preview loading finished')
278 'MATERIAL': 'material',
279 'TEXTURE': 'texture',
282 scene
= bpy
.context
.scene
284 props
= scene
.blenderkitUI
286 directory
= paths
.get_temp_dir('%s_search' % mappingdict
[props
.asset_type
])
287 s
= bpy
.context
.scene
288 results
= s
.get('search results')
290 if results
is not None:
297 tpath
= os
.path
.join(directory
, r
['thumbnail_small'])
299 iname
= utils
.previmg_name(i
)
301 if os
.path
.exists(tpath
): # sometimes we are unlucky...
302 img
= bpy
.data
.images
.get(iname
)
304 img
= bpy
.data
.images
.load(tpath
)
306 elif img
.filepath
!= tpath
:
307 # had to add this check for autopacking files...
308 if img
.packed_file
is not None:
309 img
.unpack(method
='USE_ORIGINAL')
312 img
.colorspace_settings
.name
= 'Linear'
314 # print('previews loaded')
317 # line splitting for longer texts...
318 def split_subs(text
, threshold
=40):
321 # temporarily disable this, to be able to do this in drawing code
326 while len(text
) > threshold
:
327 i
= text
.rfind(' ', 0, threshold
)
328 i1
= text
.rfind(',', 0, threshold
)
329 i2
= text
.rfind('.', 0, threshold
)
333 lines
.append(text
[:i
])
def list_to_str(input):
    """Join a list of strings into one comma-separated string.

    The loop body and return were lost from this copy; ', '.join reproduces
    the original accumulate-with-separator behavior.
    """
    return ', '.join(input)
def writeblock(t, input, width=40):  # for longer texts
    """Append `input` to tooltip text `t`, wrapped to `width` characters per line.

    Restores the accumulation and return lines missing from this copy
    (mirrors the identical loop in writeblockm).
    """
    dlines = split_subs(input, threshold=width)
    for i, l in enumerate(dlines):
        t += '%s\n' % l
    return t
def writeblockm(tooltip, mdata, key='', pretext=None, width=40):  # for longer texts
    """Append metadata value `mdata[key]` to `tooltip`, wrapped to `width`.

    Lists are joined, floats rounded to 3 decimals; an optional `pretext`
    label is prefixed as 'pretext: '. Missing or empty values leave the
    tooltip unchanged. Several interior lines (early returns, str()
    conversion, pretext default) were lost from this copy and are restored.
    """
    if mdata.get(key) == None:
        return tooltip
    intext = mdata[key]
    if type(intext) == list:
        intext = list_to_str(intext)
    if type(intext) == float:
        intext = round(intext, 3)
    intext = str(intext)
    if intext.rstrip() == '':
        return tooltip
    if pretext == None:
        pretext = ''
    if pretext != '':
        pretext = pretext + ': '
    text = pretext + intext
    dlines = split_subs(text, threshold=width)
    for i, l in enumerate(dlines):
        tooltip += '%s\n' % l
    return tooltip
def fmt_length(prop):
    """Format a numeric length as a string in meters, rounded to 2 decimals.

    Restores the return statement missing from this copy.
    """
    return str(round(prop, 2)) + 'm'
def has(mdata, prop):
    """Return True when `mdata[prop]` exists and is neither None nor False.

    Identity checks (`is not`) are kept deliberately: 0 and '' count as
    present. Restores the return statements missing from this copy.
    """
    if mdata.get(prop) is not None and mdata[prop] is not None and mdata[prop] is not False:
        return True
    return False
def params_to_dict(params):
    """Convert a list of {'parameterType': ..., 'value': ...} dicts into a flat dict.

    Later duplicates overwrite earlier ones, same as the original loop.
    Restores the init/loop/return lines missing from this copy.
    """
    return {p['parameterType']: p['value'] for p in params}
398 def generate_tooltip(mdata
):
400 if type(mdata
['parameters']) == list:
401 mparams
= params_to_dict(mdata
['parameters'])
403 mparams
= mdata
['parameters']
405 t
= writeblock(t
, mdata
['name'], width
=col_w
)
408 t
= writeblockm(t
, mdata
, key
='description', pretext
='', width
=col_w
)
409 if mdata
['description'] != '':
412 bools
= (('rig', None), ('animated', None), ('manifold', 'non-manifold'), ('scene', None), ('simulation', None),
415 if mparams
.get(b
[0]):
416 mdata
['tags'].append(b
[0])
418 mdata
['tags'].append(b
[1])
420 bools_data
= ('adult',)
422 if mdata
.get(b
) and mdata
[b
]:
423 mdata
['tags'].append(b
)
424 t
= writeblockm(t
, mparams
, key
='designer', pretext
='designer', width
=col_w
)
425 t
= writeblockm(t
, mparams
, key
='manufacturer', pretext
='manufacturer', width
=col_w
)
426 t
= writeblockm(t
, mparams
, key
='designCollection', pretext
='design collection', width
=col_w
)
428 # t = writeblockm(t, mparams, key='engines', pretext='engine', width = col_w)
429 # t = writeblockm(t, mparams, key='model_style', pretext='style', width = col_w)
430 # t = writeblockm(t, mparams, key='material_style', pretext='style', width = col_w)
431 # t = writeblockm(t, mdata, key='tags', width = col_w)
432 # t = writeblockm(t, mparams, key='condition', pretext='condition', width = col_w)
433 # t = writeblockm(t, mparams, key='productionLevel', pretext='production level', width = col_w)
434 if has(mdata
, 'purePbr'):
435 t
= writeblockm(t
, mparams
, key
='pbrType', pretext
='pbr', width
=col_w
)
437 t
= writeblockm(t
, mparams
, key
='designYear', pretext
='design year', width
=col_w
)
439 if has(mparams
, 'dimensionX'):
440 t
+= 'size: %s, %s, %s\n' % (fmt_length(mparams
['dimensionX']),
441 fmt_length(mparams
['dimensionY']),
442 fmt_length(mparams
['dimensionZ']))
443 if has(mparams
, 'faceCount'):
444 t
+= 'face count: %s, render: %s\n' % (mparams
['faceCount'], mparams
['faceCountRender'])
446 # t = writeblockm(t, mparams, key='meshPolyType', pretext='mesh type', width = col_w)
447 # t = writeblockm(t, mparams, key='objectCount', pretext='nubmber of objects', width = col_w)
449 # t = writeblockm(t, mparams, key='materials', width = col_w)
450 # t = writeblockm(t, mparams, key='modifiers', width = col_w)
451 # t = writeblockm(t, mparams, key='shaders', width = col_w)
453 if has(mparams
, 'textureSizeMeters'):
454 t
+= 'texture size: %s\n' % fmt_length(mparams
['textureSizeMeters'])
456 if has(mparams
, 'textureResolutionMax') and mparams
['textureResolutionMax'] > 0:
457 if mparams
['textureResolutionMin'] == mparams
['textureResolutionMax']:
458 t
= writeblockm(t
, mparams
, key
='textureResolutionMin', pretext
='texture resolution', width
=col_w
)
460 t
+= 'tex resolution: %i - %i\n' % (mparams
['textureResolutionMin'], mparams
['textureResolutionMax'])
462 if has(mparams
, 'thumbnailScale'):
463 t
= writeblockm(t
, mparams
, key
='thumbnailScale', pretext
='preview scale', width
=col_w
)
465 # t += 'uv: %s\n' % mdata['uv']
467 # t = writeblockm(t, mdata, key='license', width = col_w)
469 # generator is for both upload preview and search, this is only after search
470 # if mdata.get('versionNumber'):
471 # # t = writeblockm(t, mdata, key='versionNumber', pretext='version', width = col_w)
472 # a_id = mdata['author'].get('id')
474 # adata = bpy.context.window_manager['bkit authors'].get(str(a_id))
476 # t += generate_author_textblock(adata)
479 if len(t
.split('\n')) < 6:
481 t
+= get_random_tip(mdata
)
def get_random_tip(mdata):
    """Return a short random usage tip, wrapped for the tooltip width.

    Restores the accumulator initialization and return missing from this copy.
    """
    t = ''
    rtips = ['Click or drag model or material in scene to link/append ',
             "Click on brushes to link them into scene.",
             "All materials are free.",
             "All brushes are free.",
             "Locked models are available if you subscribe to Full plan.",
             "Login to upload your own models, materials or brushes.",
             "Use 'A' key to search assets by same author.",
             "Use 'W' key to open Authors webpage.", ]
    tip = 'Tip: ' + random.choice(rtips)
    t = writeblock(t, tip)
    return t
def generate_author_textblock(adata):
    """Build a multi-line author-info text from an author data dict.

    Returns '' when no usable data is supplied. Several lines (accumulator
    init, header, return) were lost from this copy and are reconstructed;
    TODO confirm the exact header text against the original file.
    """
    t = ''
    if adata not in (None, ''):
        col_w = 40
        if len(adata['firstName'] + adata['lastName']) > 0:
            t = 'Author:\n'
            t += '%s %s\n' % (adata['firstName'], adata['lastName'])
        if adata.get('aboutMeUrl') is not None:
            t = writeblockm(t, adata, key='aboutMeUrl', pretext='', width=col_w)
        if adata.get('aboutMe') is not None:
            t = writeblockm(t, adata, key='aboutMe', pretext='', width=col_w)
    return t
def get_items_models(self, context):
    """EnumProperty items callback: expose the cached model search results."""
    # reading a module-level name needs no `global` declaration
    return search_items_models
def get_items_brushes(self, context):
    """EnumProperty items callback: expose the cached brush search results."""
    # reading a module-level name needs no `global` declaration
    return search_items_brushes
def get_items_materials(self, context):
    """EnumProperty items callback: expose the cached material search results."""
    # reading a module-level name needs no `global` declaration
    return search_items_materials
def get_items_textures(self, context):
    """EnumProperty items callback: expose the cached texture search results."""
    # reading a module-level name needs no `global` declaration
    return search_items_textures
550 class ThumbDownloader(threading
.Thread
):
553 def __init__(self
, url
, path
):
554 super(ThumbDownloader
, self
).__init
__()
557 self
._stop
_event
= threading
.Event()
560 self
._stop
_event
.set()
563 return self
._stop
_event
.is_set()
566 r
= rerequests
.get(self
.url
, stream
=False)
567 if r
.status_code
== 200:
568 with
open(self
.path
, 'wb') as f
:
570 # ORIGINALLY WE DOWNLOADED THUMBNAILS AS STREAM, BUT THIS WAS TOO SLOW.
571 # with open(path, 'wb') as f:
572 # for chunk in r.iter_content(1048576*4):
def write_author(a_id, adata):
    """Cache author data (with a generated tooltip) in the window manager,
    unless an entry for this author id already exists."""
    authors = bpy.context.window_manager['bkit authors']
    if authors.get(a_id) not in (None, ''):
        return
    adata['tooltip'] = generate_author_textblock(adata)
    authors[a_id] = adata
584 def fetch_author(a_id
, api_key
):
585 utils
.p('fetch author')
587 a_url
= paths
.get_api_url() + 'accounts/' + a_id
+ '/'
588 headers
= utils
.get_headers(api_key
)
589 r
= rerequests
.get(a_url
, headers
=headers
)
590 if r
.status_code
== 200:
592 if not hasattr(adata
, 'id'):
595 tasks_queue
.add_task((write_author
, (a_id
, adata
)))
596 if adata
.get('gravatarHash') is not None:
597 gravatar_path
= paths
.get_temp_dir(subdir
='g/') + adata
['gravatarHash'] + '.jpg'
598 url
= "https://www.gravatar.com/avatar/" + adata
['gravatarHash'] + '?d=404'
599 r
= rerequests
.get(url
, stream
=False)
600 if r
.status_code
== 200:
601 with
open(gravatar_path
, 'wb') as f
:
603 adata
['gravatarImg'] = gravatar_path
604 elif r
.status_code
== '404':
605 adata
['gravatarHash'] = None
606 utils
.p('gravatar for author not available.')
607 except Exception as e
:
609 utils
.p('finish fetch')
613 a_id
= str(r
['author']['id'])
614 preferences
= bpy
.context
.preferences
.addons
['blenderkit'].preferences
615 authors
= bpy
.context
.window_manager
.get('bkit authors', {})
617 bpy
.context
.window_manager
['bkit authors'] = authors
618 a
= authors
.get(a_id
)
619 if a
is None or a
is '' or \
620 (a
.get('gravatarHash') is not None and a
.get('gravatarImg') is None):
622 thread
= threading
.Thread(target
=fetch_author
, args
=(a_id
, preferences
.api_key
), daemon
=True)
def write_profile(adata):
    """Store the user's profile data in the window manager.

    Restores the `user = adata['user']` binding missing from this copy
    (grounded by request_profile's `adata.get('user')` check).
    """
    utils.p('writing profile')
    user = adata['user']
    # we have to convert to MiB here, numbers too big for python int type
    if user.get('sumAssetFilesSize') is not None:
        user['sumAssetFilesSize'] /= (1024 * 1024)
    if user.get('sumPrivateAssetFilesSize') is not None:
        user['sumPrivateAssetFilesSize'] /= (1024 * 1024)
    if user.get('remainingPrivateQuota') is not None:
        user['remainingPrivateQuota'] /= (1024 * 1024)
    bpy.context.window_manager['bkit profile'] = adata
def request_profile(api_key):
    """Fetch the user's profile from the API; return the parsed dict or None.

    Restores the JSON decode and the return statements missing from this copy.
    """
    a_url = paths.get_api_url() + 'me/'
    headers = utils.get_headers(api_key)
    r = rerequests.get(a_url, headers=headers)
    adata = r.json()
    if adata.get('user') is None:
        utils.p('getting profile failed')
        return None
    return adata
def fetch_profile(api_key):
    """Fetch the user profile (network, run in a thread) and queue it for writing.

    Restores the `try:` that the dangling `except` in this copy requires;
    errors are swallowed deliberately since this runs in a background thread.
    """
    utils.p('fetch profile')
    try:
        adata = request_profile(api_key)
        if adata is not None:
            tasks_queue.add_task((write_profile, (adata,)))
    except Exception as e:
        utils.p(e)
664 preferences
= bpy
.context
.preferences
.addons
['blenderkit'].preferences
665 a
= bpy
.context
.window_manager
.get('bkit profile')
666 thread
= threading
.Thread(target
=fetch_profile
, args
=(preferences
.api_key
,), daemon
=True)
671 class Searcher(threading
.Thread
):
674 def __init__(self
, query
, params
):
675 super(Searcher
, self
).__init
__()
678 self
._stop
_event
= threading
.Event()
681 self
._stop
_event
.set()
684 return self
._stop
_event
.is_set()
693 mt('search thread started')
694 tempdir
= paths
.get_temp_dir('%s_search' % query
['asset_type'])
695 json_filepath
= os
.path
.join(tempdir
, '%s_searchresult.json' % query
['asset_type'])
697 headers
= utils
.get_headers(params
['api_key'])
700 rdata
['results'] = []
702 if params
['get_next']:
703 with
open(json_filepath
, 'r') as infile
:
705 origdata
= json
.load(infile
)
706 urlquery
= origdata
['next']
710 # in case no search results found on drive we don't do next page loading.
711 params
['get_next'] = False
712 if not params
['get_next']:
713 # build a new request
714 url
= paths
.get_api_url() + 'search/'
716 # build request manually
717 # TODO use real queries
718 requeststring
= '?query=' + query
['keywords'].lower() + '+'
720 for i
, q
in enumerate(query
):
721 requeststring
+= q
+ ':' + str(query
[q
]).lower()
722 if i
< len(query
) - 1:
725 # result ordering: _score - relevance, score - BlenderKit score
726 if query
.get('category_subtree') is not None:
727 requeststring
+= '+order:-score,_score'
729 requeststring
+= '+order:_score'
731 requeststring
+= '&addon_version=%s' % params
['addon_version']
732 if params
.get('scene_uuid') is not None:
733 requeststring
+= '&scene_uuid=%s' % params
['scene_uuid']
735 urlquery
= url
+ requeststring
739 r
= rerequests
.get(urlquery
, headers
=headers
)
742 except requests
.exceptions
.RequestException
as e
:
747 mt('response is back ')
750 except Exception as inst
:
756 # filter results here:
757 # todo remove this in future
759 for d
in rdata
.get('results', []):
760 # TODO this code is for filtering brush types, should vanish after we implement filter in Elastic
762 if query
['asset_type'] == 'brush':
763 for p
in d
['parameters']:
764 if p
['parameterType'] == 'mode':
766 if query
['asset_type'] != 'brush' or (
767 query
.get('brushType') != None and query
['brushType']) == mode
:
769 rdata
['results'] = nresults
771 # print('number of results: ', len(rdata.get('results', [])))
773 utils
.p('stopping search : ' + query
['keywords'])
776 mt('search finished')
779 thumb_small_urls
= []
780 thumb_small_filepaths
= []
782 thumb_full_filepaths
= []
785 for d
in rdata
.get('results', []):
786 if getting_authors
.get(d
['author']['id']) is None:
788 getting_authors
[d
['author']['id']] = True
791 # TODO move validation of published assets to server, too manmy checks here.
792 if f
['fileType'] == 'thumbnail' and f
['fileThumbnail'] != None and f
['fileThumbnailLarge'] != None:
793 if f
['fileThumbnail'] == None:
794 f
['fileThumbnail'] = 'NONE'
795 if f
['fileThumbnailLarge'] == None:
796 f
['fileThumbnailLarge'] = 'NONE'
798 thumb_small_urls
.append(f
['fileThumbnail'])
799 thumb_full_urls
.append(f
['fileThumbnailLarge'])
801 imgname
= paths
.extract_filename_from_url(f
['fileThumbnail'])
802 imgpath
= os
.path
.join(tempdir
, imgname
)
803 thumb_small_filepaths
.append(imgpath
)
805 imgname
= paths
.extract_filename_from_url(f
['fileThumbnailLarge'])
806 imgpath
= os
.path
.join(tempdir
, imgname
)
807 thumb_full_filepaths
.append(imgpath
)
809 sml_thbs
= zip(thumb_small_filepaths
, thumb_small_urls
)
810 full_thbs
= zip(thumb_full_filepaths
, thumb_full_urls
)
812 # we save here because a missing thumbnail check is in the previous loop
813 # we can also prepend previous results. These have downloaded thumbnails already...
814 if params
['get_next']:
815 rdata
['results'][0:0] = origdata
['results']
817 with
open(json_filepath
, 'w') as outfile
:
818 json
.dump(rdata
, outfile
)
821 for k
in thumb_sml_download_threads
.keys():
822 if k
not in thumb_small_filepaths
:
823 killthreads_sml
.append(k
) # do actual killing here?
825 killthreads_full
= []
826 for k
in thumb_full_download_threads
.keys():
827 if k
not in thumb_full_filepaths
:
828 killthreads_full
.append(k
) # do actual killing here?
829 # TODO do the killing/ stopping here! remember threads might have finished inbetween!
832 utils
.p('stopping search : ' + query
['keywords'])
835 # this loop handles downloading of small thumbnails
836 for imgpath
, url
in sml_thbs
:
837 if imgpath
not in thumb_sml_download_threads
and not os
.path
.exists(imgpath
):
838 thread
= ThumbDownloader(url
, imgpath
)
839 # thread = threading.Thread(target=download_thumbnail, args=([url, imgpath]),
842 thumb_sml_download_threads
[imgpath
] = thread
843 # threads.append(thread)
845 if len(thumb_sml_download_threads
) > maxthreads
:
846 while len(thumb_sml_download_threads
) > maxthreads
:
847 threads_copy
= thumb_sml_download_threads
.copy() # because for loop can erase some of the items.
848 for tk
, thread
in threads_copy
.items():
849 if not thread
.is_alive():
852 del (thumb_sml_download_threads
[tk
])
853 # utils.p('fetched thumbnail ', i)
856 utils
.p('stopping search : ' + query
['keywords'])
859 while len(thumb_sml_download_threads
) > 0:
860 threads_copy
= thumb_sml_download_threads
.copy() # because for loop can erase some of the items.
861 for tk
, thread
in threads_copy
.items():
862 if not thread
.is_alive():
864 del (thumb_sml_download_threads
[tk
])
868 utils
.p('stopping search : ' + query
['keywords'])
871 # start downloading full thumbs in the end
872 for imgpath
, url
in full_thbs
:
873 if imgpath
not in thumb_full_download_threads
and not os
.path
.exists(imgpath
):
874 thread
= ThumbDownloader(url
, imgpath
)
875 # thread = threading.Thread(target=download_thumbnail, args=([url, imgpath]),
878 thumb_full_download_threads
[imgpath
] = thread
879 mt('thumbnails finished')
def build_query_common(query, props):
    """Add search parameters shared by all asset types (keywords) to `query`.

    Restores the dict literal braces missing from this copy.
    """
    query_common = {
        "keywords": props.search_keywords
    }
    query.update(query_common)
889 # def query_add_range(query, name, rmin, rmax):
def build_query_model():
    '''use all search input to request results from server'''
    props = bpy.context.scene.blenderkit_models
    query = {
        "asset_type": 'model',
        # "engine": props.search_engine,
        # "adult": props.search_adult,
    }
    if props.search_style != 'ANY':
        if props.search_style != 'OTHER':
            query["model_style"] = props.search_style
        else:
            query["model_style"] = props.search_style_other
    # NOTE(review): the condition guarding is_free was lost from this copy;
    # props.free_only matches upstream usage -- confirm.
    if props.free_only:
        query["is_free"] = True
    if props.search_advanced:
        if props.search_condition != 'UNSPECIFIED':
            query["condition"] = props.search_condition
        if props.search_design_year:
            query["designYearMin"] = props.search_design_year_min
            query["designYearMax"] = props.search_design_year_max
        if props.search_polycount:
            query["polyCountMin"] = props.search_polycount_min
            query["polyCountMax"] = props.search_polycount_max
        if props.search_texture_resolution:
            query["textureResolutionMin"] = props.search_texture_resolution_min
            query["textureResolutionMax"] = props.search_texture_resolution_max
    build_query_common(query, props)
    return query
def build_query_scene():
    '''use all search input to request results from server'''
    props = bpy.context.scene.blenderkit_scene
    query = {
        "asset_type": 'scene',
        # "engine": props.search_engine,
        # "adult": props.search_adult,
    }
    build_query_common(query, props)
    # restore the return missing from this copy so callers receive the query
    return query
def build_query_material():
    """Build the search query dict for materials from scene search properties."""
    props = bpy.context.scene.blenderkit_mat
    query = {
        "asset_type": 'material',
    }
    # if props.search_engine == 'NONE':
    #     query["engine"] = ''
    # if props.search_engine != 'OTHER':
    #     query["engine"] = props.search_engine
    # else:
    #     query["engine"] = props.search_engine_other
    if props.search_style != 'ANY':
        if props.search_style != 'OTHER':
            query["style"] = props.search_style
        else:
            query["style"] = props.search_style_other
    build_query_common(query, props)
    # restore the return missing from this copy
    return query
def build_query_texture():
    """Build the search query dict for textures from scene search properties."""
    props = bpy.context.scene.blenderkit_tex
    query = {
        "asset_type": 'texture',
    }
    if props.search_style != 'ANY':
        if props.search_style != 'OTHER':
            query["search_style"] = props.search_style
        else:
            query["search_style"] = props.search_style_other
    build_query_common(query, props)
    # restore the return missing from this copy
    return query
def build_query_brush():
    """Build the search query dict for brushes; brush type follows the active paint mode."""
    props = bpy.context.scene.blenderkit_brush
    brush_type = ''
    if bpy.context.sculpt_object is not None:
        brush_type = 'sculpt'
    elif bpy.context.image_paint_object:  # could be just else, but for future p
        brush_type = 'texture_paint'
    query = {
        "asset_type": 'brush',
        "brushType": brush_type
    }
    build_query_common(query, props)
    # restore the return missing from this copy
    return query
1002 global search_start_time
, prev_time
1003 alltime
= time
.time() - search_start_time
1004 since_last
= time
.time() - prev_time
1005 prev_time
= time
.time()
1006 utils
.p(text
, alltime
, since_last
)
def add_search_process(query, params):
    """Stop any running search threads and launch a new Searcher for `query`."""
    global search_threads
    # stop previous searches so an older search can't overwrite newer results
    while (len(search_threads) > 0):
        old_thread = search_threads.pop(0)
        old_thread[0].stop()
        # TODO CARE HERE FOR ALSO KILLING THE THREADS...AT LEAST NOW SEARCH DONE FIRST WON'T REWRITE AN OLDER ONE
    tempdir = paths.get_temp_dir('%s_search' % query['asset_type'])
    thread = Searcher(query, params)
    # fix: the thread was created and registered but never started in this copy
    thread.start()
    search_threads.append([thread, tempdir, query['asset_type']])
    mt('thread started')
1026 def search(category
='', get_next
=False, author_id
=''):
1027 ''' initialize searching'''
1028 global search_start_time
1029 user_preferences
= bpy
.context
.preferences
.addons
['blenderkit'].preferences
1031 search_start_time
= time
.time()
1033 scene
= bpy
.context
.scene
1034 uiprops
= scene
.blenderkitUI
1036 if uiprops
.asset_type
== 'MODEL':
1037 if not hasattr(scene
, 'blenderkit'):
1039 props
= scene
.blenderkit_models
1040 query
= build_query_model()
1042 if uiprops
.asset_type
== 'SCENE':
1043 if not hasattr(scene
, 'blenderkit_scene'):
1045 props
= scene
.blenderkit_scene
1046 query
= build_query_scene()
1048 if uiprops
.asset_type
== 'MATERIAL':
1049 if not hasattr(scene
, 'blenderkit_mat'):
1051 props
= scene
.blenderkit_mat
1052 query
= build_query_material()
1054 if uiprops
.asset_type
== 'TEXTURE':
1055 if not hasattr(scene
, 'blenderkit_tex'):
1057 # props = scene.blenderkit_tex
1058 # query = build_query_texture()
1060 if uiprops
.asset_type
== 'BRUSH':
1061 if not hasattr(scene
, 'blenderkit_brush'):
1063 props
= scene
.blenderkit_brush
1064 query
= build_query_brush()
1066 if props
.is_searching
and get_next
== True:
1070 query
['category_subtree'] = category
1073 query
['author_id'] = author_id
1075 # utils.p('searching')
1076 props
.is_searching
= True
1079 'scene_uuid': bpy
.context
.scene
.get('uuid', None),
1080 'addon_version': version_checker
.get_addon_version(),
1081 'api_key': user_preferences
.api_key
,
1082 'get_next': get_next
1086 # query['keywords'] += '+is_free:true'
1088 add_search_process(query
, params
)
1089 tasks_queue
.add_task((ui
.add_report
, ('BlenderKit searching....', 2)))
1091 props
.report
= 'BlenderKit searching....'
1094 def search_update(self
, context
):
1095 utils
.p('search updater')
1096 if self
.search_keywords
!= '':
1100 class SearchOperator(Operator
):
1102 bl_idname
= "view3d.blenderkit_search"
1103 bl_label
= "BlenderKit asset search"
1104 bl_description
= "Search online for assets"
1105 bl_options
= {'REGISTER', 'UNDO', 'INTERNAL'}
1106 own
: BoolProperty(name
="own assets only",
1107 description
="Find all own assets",
1110 category
: StringProperty(
1112 description
="search only subtree of this category",
1114 options
= {'SKIP_SAVE'}
1117 author_id
: StringProperty(
1119 description
="Author ID - search only assets by this author",
1121 options
= {'SKIP_SAVE'}
1124 get_next
: BoolProperty(name
="next page",
1125 description
="get next page from previous search",
1127 options
= {'SKIP_SAVE'}
1130 keywords
: StringProperty(
1132 description
="Keywords",
1134 options
= {'SKIP_SAVE'}
1138 def poll(cls
, context
):
1141 def execute(self
, context
):
1142 # TODO ; this should all get transferred to properties of the search operator, so sprops don't have to be fetched here at all.
1143 sprops
= utils
.get_search_props()
1144 if self
.author_id
!= '':
1145 sprops
.search_keywords
= ''
1146 if self
.keywords
!= '':
1147 sprops
.search_keywords
= self
.keywords
1149 search(category
=self
.category
, get_next
=self
.get_next
, author_id
=self
.author_id
)
1150 # bpy.ops.view3d.blenderkit_asset_bar()
def register_search():
    """Register the search handler, operator classes and the update timer."""
    bpy.app.handlers.load_post.append(scene_load)
    # fix: `c` was unbound in this copy -- restore the loop over the module's
    # `classes` tuple; TODO confirm the tuple name against the original file.
    for c in classes:
        bpy.utils.register_class(c)
    bpy.app.timers.register(timer_update, persistent=True)
    categories.load_categories()
def unregister_search():
    """Unregister the search handler, operator classes and the update timer."""
    bpy.app.handlers.load_post.remove(scene_load)
    # fix: `c` was unbound in this copy -- restore the loop over the module's
    # `classes` tuple; TODO confirm the tuple name against the original file.
    for c in classes:
        bpy.utils.unregister_class(c)
    bpy.app.timers.unregister(timer_update)