// TortoiseGit - a Windows shell extension for easy version control

// External Cache Copyright (C) 2005-2008,2011 - TortoiseSVN
// Copyright (C) 2008-2012 - TortoiseGit

// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software Foundation,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
//

#include "StdAfx.h"
#include "foldercrawler.h"
#include "GitStatusCache.h"
#include "registry.h"
#include "TGitCache.h"
#include "shlobj.h"
#include "SysInfo.h"

CFolderCrawler::CFolderCrawler(void)
{
	m_hWakeEvent = CreateEvent(NULL,FALSE,FALSE,NULL);
	m_hTerminationEvent = CreateEvent(NULL,TRUE,FALSE,NULL);
	m_lCrawlInhibitSet = 0;
	m_crawlHoldoffReleasesAt = (long)GetTickCount();
	m_bRun = false;
	m_bPathsAddedSinceLastCrawl = false;
	m_bItemsAddedSinceLastCrawl = false;
}

CFolderCrawler::~CFolderCrawler(void)
{
	Stop();
}

void CFolderCrawler::Stop()
{
	m_bRun = false;
	if (m_hTerminationEvent)
	{
		SetEvent(m_hTerminationEvent);
		if(WaitForSingleObject(m_hThread, 4000) != WAIT_OBJECT_0)
		{
			ATLTRACE("Error terminating crawler thread\n");
		}
	}
	m_hThread.CloseHandle();
	m_hTerminationEvent.CloseHandle();
	m_hWakeEvent.CloseHandle();
}

void CFolderCrawler::Initialise()
{
	// Don't call Initialise more than once
	ATLASSERT(!m_hThread);

	// Just start the worker thread.
	// It will wait for the event to be signaled.
	// If m_hWakeEvent is already signaled the worker thread
	// will behave properly (with normal priority at worst).

	m_bRun = true;
	unsigned int threadId;
	m_hThread = (HANDLE)_beginthreadex(NULL,0,ThreadEntry,this,0,&threadId);
	SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_IDLE);
}

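// Removes every occurrence of 'path' from the given queue. Since std::deque::erase()
// invalidates iterators, the search restarts from the beginning after each removal.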
void CFolderCrawler::RemoveDuplicate(std::deque<CTGitPath> &list,const CTGitPath &path)
{
	std::deque<CTGitPath>::iterator it, lastit;
	for(it = list.begin(); it != list.end(); ++it)
	{
		if(*it == path)
		{
			list.erase(it);
			it = list.begin(); /* search again */
			if(it == list.end())
				break;
		}
	}
}

void CFolderCrawler::AddDirectoryForUpdate(const CTGitPath& path)
{
	/* Index file changing */
	if( GitStatus::IsExistIndexLockFile((CString&)path.GetWinPathString()))
		return;

	if (!CGitStatusCache::Instance().IsPathGood(path))
		return;

	ATLTRACE(_T("AddDirectoryForUpdate %s\n"),path.GetWinPath());
	{
		AutoLocker lock(m_critSec);

		m_foldersToUpdate.Push(path);

		//ATLASSERT(path.IsDirectory() || !path.Exists());
		// set this flag while we are sync'ed
		// with the worker thread
		m_bItemsAddedSinceLastCrawl = true;
	}
	//if (SetHoldoff())
		SetEvent(m_hWakeEvent);
}

void CFolderCrawler::AddPathForUpdate(const CTGitPath& path)
{
	/* Index file changing */
	if( GitStatus::IsExistIndexLockFile((CString&)path.GetWinPathString()))
		return;

	{
		AutoLocker lock(m_critSec);

		m_pathsToUpdate.Push(path);
		m_bPathsAddedSinceLastCrawl = true;
	}
	//if (SetHoldoff())
		SetEvent(m_hWakeEvent);
}

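// Static entry point handed to _beginthreadex(); simply forwards to the instance's worker loop.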
unsigned int CFolderCrawler::ThreadEntry(void* pContext)
{
	((CFolderCrawler*)pContext)->WorkerThread();
	return 0;
}

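// Worker loop: the outer for(;;) blocks on the termination and wake events. Once woken,
// the inner for(;;) drains the two queues, m_pathsToUpdate (individual changed paths) and
// m_foldersToUpdate (folders queued for a full re-crawl), until both are empty.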
void CFolderCrawler::WorkerThread()
{
	HANDLE hWaitHandles[2];
	hWaitHandles[0] = m_hTerminationEvent;
	hWaitHandles[1] = m_hWakeEvent;
	CTGitPath workingPath;
	bool bFirstRunAfterWakeup = false;
	DWORD currentTicks = 0;

	for(;;)
	{
		bool bRecursive = !!(DWORD)CRegStdDWORD(_T("Software\\TortoiseGit\\RecursiveOverlay"), TRUE);

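		// The crawl work below runs in background processing mode on Vista and later,
		// which lowers the thread's I/O and memory priority. The mode is left before
		// blocking on the events and re-entered once the thread has been woken up.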
		if (SysInfo::Instance().IsVistaOrLater())
		{
			SetThreadPriority(GetCurrentThread(), THREAD_MODE_BACKGROUND_END);
		}
		DWORD waitResult = WaitForMultipleObjects(_countof(hWaitHandles), hWaitHandles, FALSE, INFINITE);

		// exit event/working loop if the first event (m_hTerminationEvent)
		// has been signaled or if one of the events has been abandoned
		// (i.e. ~CFolderCrawler() is being executed)
		if(m_bRun == false || waitResult == WAIT_OBJECT_0 || waitResult == WAIT_ABANDONED_0 || waitResult == WAIT_ABANDONED_0+1)
		{
			// Termination event
			break;
		}

		if (SysInfo::Instance().IsVistaOrLater())
		{
			SetThreadPriority(GetCurrentThread(), THREAD_MODE_BACKGROUND_BEGIN);
		}

		// If we get here, we've been woken up by something being added to the queue.
		// However, it's important that we don't do our crawling while
		// the shell is still asking for items
		bFirstRunAfterWakeup = true;
		for(;;)
		{
			if (!m_bRun)
				break;
			// Any locks today?
			if (CGitStatusCache::Instance().m_bClearMemory)
			{
				CGitStatusCache::Instance().WaitToWrite();
				CGitStatusCache::Instance().ClearCache();
				CGitStatusCache::Instance().Done();
				CGitStatusCache::Instance().m_bClearMemory = false;
			}
			if(m_lCrawlInhibitSet > 0)
			{
				// We're in crawl hold-off
				ATLTRACE("Crawl hold-off\n");
				Sleep(50);
				continue;
			}
			if (bFirstRunAfterWakeup)
			{
				Sleep(20);
				ATLTRACE("Crawl bFirstRunAfterWakeup\n");
				bFirstRunAfterWakeup = false;
				continue;
			}
			if ((m_blockReleasesAt < GetTickCount())&&(!m_blockedPath.IsEmpty()))
			{
				ATLTRACE(_T("Crawl stop blocking path %s\n"), m_blockedPath.GetWinPath());
				m_blockedPath.Reset();
			}
			CGitStatusCache::Instance().RemoveTimedoutBlocks();

			if ((m_foldersToUpdate.size() == 0) && (m_pathsToUpdate.size() == 0))
			{
				// Nothing left to do
				break;
			}
			currentTicks = GetTickCount();
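			// Individual changed paths are serviced first; whole-folder crawl requests
			// (the else-if branch further down) are only handled while the path queue is empty.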
			if (m_pathsToUpdate.size())
			{
				{
					AutoLocker lock(m_critSec);

					m_bPathsAddedSinceLastCrawl = false;

					workingPath = m_pathsToUpdate.Pop();
					if ((!m_blockedPath.IsEmpty()) && (m_blockedPath.IsAncestorOf(workingPath)))
					{
						// move the path to the end of the list
						m_pathsToUpdate.Push(workingPath);
						if (m_pathsToUpdate.size() < 3)
							Sleep(50);
						continue;
					}
				}

				// don't crawl paths that are excluded
				if (!CGitStatusCache::Instance().IsPathAllowed(workingPath))
					continue;
				// check if the changed path is inside a .git folder
				CString projectroot;
				if ((workingPath.HasAdminDir(&projectroot)&&workingPath.IsDirectory()) || workingPath.IsAdminDir())
				{
					// we don't crawl for paths changed in a tmp folder inside a .git folder,
					// because we also get notifications for those even if we just ask for the status!
					// And changes there don't affect the file status at all, so it's safe
					// to ignore notifications on those paths.
					if (workingPath.IsAdminDir())
					{
						// TODO: add git-specific filters here. Is there really any change besides the index file in .git
						// that is relevant for overlays?
						/*CString lowerpath = workingPath.GetWinPathString();
						lowerpath.MakeLower();
						if (lowerpath.Find(_T("\\tmp\\"))>0)
							continue;
						if (lowerpath.Find(_T("\\tmp")) == (lowerpath.GetLength()-4))
							continue;
						if (lowerpath.Find(_T("\\log"))>0)
							continue;*/
						// Here's a little problem:
						// the lock file is also created for fetching the status
						// and not just when committing.
						// If we could find out why the lock file was changed
						// we could decide whether to crawl the folder again or not.
						// But for now, we have to crawl the parent folder
						// no matter what.

						//if (lowerpath.Find(_T("\\lock"))>0)
						//	continue;
						// only go back to the working copy root if we are inside the .git dir
						do
						{
							workingPath = workingPath.GetContainingDirectory();
						} while(workingPath.IsAdminDir());
					}
					else if (!workingPath.Exists())
					{
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						CGitStatusCache::Instance().Done();
						continue;
					}

					if (!CGitStatusCache::Instance().IsPathGood(workingPath))
					{
						AutoLocker lock(m_critSec);
						// move the path (or the repository root, if known) to the end of the list
						if (projectroot.IsEmpty())
							m_pathsToUpdate.Push(workingPath);
						else
							m_pathsToUpdate.Push(CTGitPath(projectroot));
						if (m_pathsToUpdate.size() < 3)
							Sleep(50);
						continue;
					}

					ATLTRACE(_T("Invalidating and refreshing folder: %s\n"), workingPath.GetWinPath());
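					// Record the path in the shared szCurrentCrawledPath ring buffer; the
					// InvalidateRect() below repaints the window behind the global hWnd
					// (presumably TGitCache's status window) so the display stays current.
					// The same pattern repeats in the other crawl branches below.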
					{
						AutoLocker print(critSec);
						_sntprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _TRUNCATE, _T("Invalidating and refreshing folder: %s"), workingPath.GetWinPath());
						nCurrentCrawledpathIndex++;
						if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
							nCurrentCrawledpathIndex = 0;
					}
					InvalidateRect(hWnd, NULL, FALSE);
					CGitStatusCache::Instance().WaitToRead();
					// Invalidate the cache of this folder, to make sure its status is fetched again.
					CCachedDirectory * pCachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath);
					if (pCachedDir)
					{
						git_wc_status_kind status = pCachedDir->GetCurrentFullStatus();
						pCachedDir->Invalidate();
						if (workingPath.Exists())
						{
							pCachedDir->RefreshStatus(bRecursive);
							// if the previous status wasn't normal and now it is, then
							// send a notification too.
							// We do this here because GetCurrentFullStatus() doesn't send
							// notifications for 'normal' status - if it would, we'd get tons
							// of notifications when crawling a working copy not yet in the cache.
							if ((status != git_wc_status_normal)&&(pCachedDir->GetCurrentFullStatus() != status))
							{
								CGitStatusCache::Instance().UpdateShell(workingPath);
								ATLTRACE(_T("shell update in crawler for %s\n"), workingPath.GetWinPath());
							}
						}
						else
						{
							CGitStatusCache::Instance().Done();
							CGitStatusCache::Instance().WaitToWrite();
							CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						}
					}
					CGitStatusCache::Instance().Done();
					// In case fetching the status modified a file and we got
					// a notification about that in the directory watcher,
					// remove that here again - this is to prevent an endless loop
					AutoLocker lock(m_critSec);
					m_pathsToUpdate.erase(workingPath);
				}
				else if (workingPath.HasAdminDir())
				{
					if (!workingPath.Exists())
					{
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						CGitStatusCache::Instance().Done();
						if (!workingPath.GetContainingDirectory().Exists())
							continue;
						else
							workingPath = workingPath.GetContainingDirectory();
					}
					ATLTRACE(_T("Updating path: %s\n"), workingPath.GetWinPath());
					{
						AutoLocker print(critSec);
						_sntprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _TRUNCATE, _T("Updating path: %s"), workingPath.GetWinPath());
						nCurrentCrawledpathIndex++;
						if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
							nCurrentCrawledpathIndex = 0;
					}
					InvalidateRect(hWnd, NULL, FALSE);
					// HasAdminDir() already checks if the path points to a dir
					DWORD flags = TGITCACHE_FLAGS_FOLDERISKNOWN;
					flags |= (workingPath.IsDirectory() ? TGITCACHE_FLAGS_ISFOLDER : 0);
					flags |= (bRecursive ? TGITCACHE_FLAGS_RECUSIVE_STATUS : 0);
					CGitStatusCache::Instance().WaitToRead();
					// Invalidate the cache of folders manually. The cache of files is invalidated
					// automatically if the status is asked for it and the file times don't match
					// anymore, so we don't need to manually invalidate those.
					if (workingPath.IsDirectory())
					{
						CCachedDirectory * cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath);
						if (cachedDir)
							cachedDir->Invalidate();
					}
					CStatusCacheEntry ce = CGitStatusCache::Instance().GetStatusForPath(workingPath, flags);
					if (ce.GetEffectiveStatus() > git_wc_status_unversioned)
					{
						CGitStatusCache::Instance().UpdateShell(workingPath);
						ATLTRACE(_T("shell update in folder crawler for %s\n"), workingPath.GetWinPath());
					}
					CGitStatusCache::Instance().Done();
					AutoLocker lock(m_critSec);
					m_pathsToUpdate.erase(workingPath);
				}
				else
				{
					if (!workingPath.Exists())
					{
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						CGitStatusCache::Instance().Done();
					}
				}
			}
			else if (m_foldersToUpdate.size())
			{
				{
					AutoLocker lock(m_critSec);
					m_bItemsAddedSinceLastCrawl = false;

					// create a new CTGitPath object to make sure the cached flags are requested again.
					// without this, a missing file/folder is still treated as missing even if it is available
					// now when crawling.
					workingPath = CTGitPath(m_foldersToUpdate.Pop().GetWinPath());

					if ((!m_blockedPath.IsEmpty())&&(m_blockedPath.IsAncestorOf(workingPath)))
					{
						// move the path to the end of the list
						m_foldersToUpdate.Push(workingPath);
						if (m_foldersToUpdate.size() < 3)
							Sleep(50);
						continue;
					}
				}
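				// A queued folder carries a tick count in its custom data (presumably set by
				// whoever queued it); if that time lies in the future, postpone the crawl.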
				if (DWORD(workingPath.GetCustomData()) >= currentTicks)
				{
					Sleep(50);
					continue;
				}
				if ((!m_blockedPath.IsEmpty())&&(m_blockedPath.IsAncestorOf(workingPath)))
					continue;
				if (!CGitStatusCache::Instance().IsPathAllowed(workingPath))
					continue;
				if (!CGitStatusCache::Instance().IsPathGood(workingPath))
					continue;

				ATLTRACE(_T("Crawling folder: %s\n"), workingPath.GetWinPath());
				{
					AutoLocker print(critSec);
					_sntprintf_s(szCurrentCrawledPath[nCurrentCrawledpathIndex], MAX_CRAWLEDPATHSLEN, _TRUNCATE, _T("Crawling folder: %s"), workingPath.GetWinPath());
					nCurrentCrawledpathIndex++;
					if (nCurrentCrawledpathIndex >= MAX_CRAWLEDPATHS)
						nCurrentCrawledpathIndex = 0;
				}
				InvalidateRect(hWnd, NULL, FALSE);
				CGitStatusCache::Instance().WaitToRead();
				// Now, we need to visit this folder, to make sure that we know its 'most important' status
				CCachedDirectory * cachedDir = CGitStatusCache::Instance().GetDirectoryCacheEntry(workingPath.GetDirectory());
				// check if the path is monitored by the watcher. If it isn't, then we have to invalidate the cache
				// for that path and add it to the watcher.
				if (!CGitStatusCache::Instance().IsPathWatched(workingPath))
				{
					if (workingPath.HasAdminDir())
					{
						ATLTRACE(_T("Add watch path %s\n"), workingPath.GetWinPath());
						CGitStatusCache::Instance().AddPathToWatch(workingPath);
					}
					if (cachedDir)
						cachedDir->Invalidate();
					else
					{
						CGitStatusCache::Instance().Done();
						CGitStatusCache::Instance().WaitToWrite();
						CGitStatusCache::Instance().RemoveCacheForPath(workingPath);
						// now cachedDir is invalid because it got deleted in the RemoveCacheForPath() call above.
						cachedDir = NULL;
					}
				}
				if (cachedDir)
					cachedDir->RefreshStatus(bRecursive);

				// While refreshing the status, we could get another crawl request for the same folder.
				// This can happen if the crawled folder has a lower status than one of the child folders
				// (recursively). To avoid crawling the same folder twice, remove such a crawl request here
				AutoLocker lock(m_critSec);
				if (m_bItemsAddedSinceLastCrawl)
				{
					m_foldersToUpdate.erase(workingPath);
				}

				CGitStatusCache::Instance().Done();
			}
		}
	}
	_endthread();
}

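// Returns true if the previous hold-off period had already expired, and starts a new one.
// The signed tick arithmetic keeps the comparison valid across GetTickCount() wrap-around.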
bool CFolderCrawler::SetHoldoff(DWORD milliseconds /* = 100*/)
{
	long tick = (long)GetTickCount();
	bool ret = ((tick - m_crawlHoldoffReleasesAt) > 0);
	m_crawlHoldoffReleasesAt = tick + milliseconds;
	return ret;
}

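// Keeps the crawler away from 'path' (and everything below it) for the given number of
// milliseconds; a value of 0 falls back to a 10 second block.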
void CFolderCrawler::BlockPath(const CTGitPath& path, DWORD ticks)
{
	ATLTRACE(_T("block path %s from being crawled\n"), path.GetWinPath());
	m_blockedPath = path;
	if (ticks == 0)
		m_blockReleasesAt = GetTickCount()+10000;
	else
		m_blockReleasesAt = GetTickCount()+ticks;
}
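
// Usage sketch (illustrative only; the real wiring lives elsewhere in TGitCache and the
// paths and values below are made up):
//
//   CFolderCrawler crawler;
//   crawler.Initialise();                                              // start the worker thread
//   crawler.AddPathForUpdate(CTGitPath(_T("D:\\repo\\file.txt")));     // one changed item
//   crawler.AddDirectoryForUpdate(CTGitPath(_T("D:\\repo\\src")));     // whole folder to re-crawl
//   crawler.BlockPath(CTGitPath(_T("D:\\repo")), 2000);                // keep the crawler away for 2 seconds
//   crawler.Stop();                                                    // signal termination and join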