Class: Pro::Indexer
- Inherits:
-
Object
- Object
- Pro::Indexer
- Defined in:
- lib/pro/indexer.rb
Overview
creates an index object from cache or by searching the file system
Constant Summary collapse
- CACHE_PATH =
File.expand_path("~/.proCache")
- INDEXER_LOCK_PATH =
File.expand_path("~/.proCacheLock")
Instance Method Summary collapse
-
#build_index ⇒ Object
scan the base directories for git repos and build an index then cache it returns an index.
-
#cache_index(index) ⇒ Object
serialize the index to a cache file.
-
#find_base_dirs ⇒ Object
Finds the base directory where repos are kept Checks the environment variable PRO_BASE and the file .proBase.
- #index ⇒ Object
- #index_process ⇒ Object
-
#index_repos(base) ⇒ Object
find all repos in a certain base directory returns an array of Repo objects.
- #index_repos_fast(base) ⇒ Object
-
#index_repos_slow(base) ⇒ Object
recursive walk in ruby.
-
#initialize ⇒ Indexer
constructor
A new instance of Indexer.
-
#read_cache ⇒ Object
unserializes the cache file and returns the index object.
-
#run_index_process ⇒ Object
spins off a background process to update the cache file.
-
#scan_bases ⇒ Object
add all git repos in all bases to the index.
-
#scan_into_index ⇒ Object
compile base directories and scan them use this info to create an index object and return it.
Constructor Details
#initialize ⇒ Indexer
Returns a new instance of Indexer.
10 11 12 13 |
# File 'lib/pro/indexer.rb', line 10
# Build an Indexer: resolve the repo base directories once up front and
# start in normal (not low-CPU) mode. @low_cpu is flipped on only by the
# background index_process.
def initialize
  @base_dirs = find_base_dirs
  @low_cpu = false
end
Instance Method Details
#build_index ⇒ Object
scan the base directories for git repos and build an index then cache it returns an index
63 64 65 66 67 |
# File 'lib/pro/indexer.rb', line 63
# Scan the base directories for git repos and build an index, then cache
# it to disk. Returns the freshly built index.
def build_index
  index = scan_into_index
  cache_index(index)
  index
end
#cache_index(index) ⇒ Object
serialize the index to a cache file
70 71 72 73 74 75 |
# File 'lib/pro/indexer.rb', line 70
# Serialize the index to the cache file.
#
# Writes to a temporary file next to CACHE_PATH and then renames it into
# place, so a concurrent reader never observes a half-written cache
# (rename within one filesystem is atomic; this resolves the old
# "TODO: atomic rename" note).
def cache_index(index)
  tmp_path = "#{CACHE_PATH}.tmp#{Process.pid}"
  File.open(tmp_path, 'w') do |out|
    YAML::dump(index, out)
  end
  File.rename(tmp_path, CACHE_PATH)
end
#find_base_dirs ⇒ Object
Finds the base directory where repos are kept Checks the environment variable PRO_BASE and the file .proBase
152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 |
# File 'lib/pro/indexer.rb', line 152
# Finds the base directories where repos are kept.
#
# Sources, in order:
#   1. the PRO_BASE environment variable, if set
#   2. each non-empty line of ~/.proBase (one path per line, expanded)
# Paths that do not exist on disk are dropped; if nothing is left the
# home directory is used so the indexer always has at least one base.
def find_base_dirs
  bases = []
  # check environment first
  base = ENV['PRO_BASE']
  bases << base if base
  # next check proBase file
  path = ENV['HOME'] + "/.proBase"
  # File.exist? replaces File.exists?, which was removed in Ruby 3.2
  if File.exist?(path)
    # read lines of the pro base file; expand each entry
    # (File.expand_path restores the call lost in doc extraction)
    bases += IO.read(path).split("\n").map { |p| File.expand_path(p.strip) }
  end
  # strip bases that do not exist
  # I know about select! but it doesn't exist in 1.8
  bases = bases.select { |b| File.exist?(b) }
  # if no bases then return home
  bases << ENV['HOME'] if bases.empty?
  bases
end
#index ⇒ Object
15 16 17 18 19 20 21 22 23 24 25 |
# File 'lib/pro/indexer.rb', line 15
# Return the repo index, preferring the on-disk cache.
#
# On a cache hit a background reindex is kicked off so the cache is fresh
# next time; on a miss (normally only right after updating pro) the index
# is built synchronously with a warning on STDERR.
def index
  # most of the time the cache should exist
  if res = read_cache
    # index in the background for next time.
    run_index_process
  else
    STDERR.puts "Indexing... This should only happen after updating.".red
    res = build_index
  end
  res
end
#index_process ⇒ Object
49 50 51 52 53 54 55 56 57 58 |
# File 'lib/pro/indexer.rb', line 49
# Body of the background indexing process: switches to low-CPU mode,
# takes the lock file so concurrent invocations skip duplicate work,
# rebuilds the index, and always removes the lock on the way out.
def index_process
  @low_cpu = true
  # create lock so no work duplicated
  begin
    File.open(INDEXER_LOCK_PATH, "w") {}
    build_index
  ensure
    File.delete(INDEXER_LOCK_PATH)
  end
end
#index_repos(base) ⇒ Object
find all repos in a certain base directory returns an array of Repo objects
97 98 99 100 101 102 103 |
# File 'lib/pro/indexer.rb', line 97
# Find all git repos under +base+; returns an array of Repo objects.
# Uses the external `find` command when available (fast path) and falls
# back to a pure-Ruby recursive walk otherwise.
def index_repos(base)
  if system("which find > /dev/null")
    index_repos_fast(base)
  else
    index_repos_slow(base)
  end
end
#index_repos_fast(base) ⇒ Object
105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 |
# File 'lib/pro/indexer.rb', line 105
# Fast repo scan of +base+ using the external `find` command.
# Returns an array of Repo objects, one per git repo found.
#
# NOTE: the chdir is deliberate — the relative paths `find` prints are
# expanded against +base+ below. The process cwd remains changed after
# the call, which the callers tolerate (each call chdirs again).
def index_repos_fast(base)
  Dir.chdir(base)
  git_paths = `find . -name .git`.lines
  # additionally, index repos symlinked directly from a base root
  dirs = `find -L . -maxdepth 1 -type d`.lines
  symlinks = `find . -maxdepth 1 -type l`.lines
  # a path in both listings is a symlink that points at a directory
  dir_sl = dirs & symlinks
  dir_sl_git_paths = dir_sl.
    map { |path| path.chomp + '/.git' }.
    # File.exist? replaces File.exists?, removed in Ruby 3.2
    select { |path| File.exist?(path) }
  # turn the command outputs into a list of repos
  repos = []
  (git_paths + dir_sl_git_paths).each do |git_path|
    next if git_path.empty?
    # File.expand_path restores the call lost in doc extraction; it
    # resolves the relative `find` output against the cwd (== base)
    git_path = File.expand_path(git_path.chomp)
    path = File.dirname(git_path)
    repo_name = File.basename(path)
    repos << Repo.new(repo_name, path)
  end
  repos
end
#index_repos_slow(base) ⇒ Object
recursive walk in ruby
129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 |
# File 'lib/pro/indexer.rb', line 129
# Slow fallback: recursive walk in pure Ruby via Find, used when the
# external `find` command is unavailable. Returns an array of Repo
# objects found under +base+.
def index_repos_slow(base)
  STDERR.puts "WARNING: pro is indexing slowly, please install the 'find' command."
  repos = []
  Find.find(base) do |path|
    target = path
    # additionally, index repos symlinked directly from a base root
    if FileTest.symlink?(path)
      next if File.dirname(path) != base
      target = File.readlink(path)
    end
    # dir must exist and be a git repo
    # (File.exist? replaces File.exists?, removed in Ruby 3.2)
    if FileTest.directory?(target) && File.exist?(path + "/.git")
      base_name = File.basename(path)
      repos << Repo.new(base_name, path)
      # do not descend into a repo looking for nested repos
      Find.prune
    end
  end
  repos
end
#read_cache ⇒ Object
unserializes the cache file and returns the index object
28 29 30 31 32 33 34 |
# File 'lib/pro/indexer.rb', line 28
# Unserialize the cache file and return the index object, or nil when the
# cache is unusable: missing/unreadable, written by a different pro
# version, built from a different set of base dirs, or corrupt. A corrupt
# cache now counts as a miss (triggering a reindex) instead of crashing.
def read_cache
  return nil unless File.readable_real?(CACHE_PATH)
  index = YAML::load_file(CACHE_PATH)
  return nil unless index.created_version == Pro::VERSION
  return nil unless index.base_dirs == @base_dirs
  index
rescue StandardError
  # unparseable or wrong-shaped cache file — treat as a cache miss
  nil
end
#run_index_process ⇒ Object
spins off a background process to update the cache file
37 38 39 40 41 42 43 44 45 46 47 |
# File 'lib/pro/indexer.rb', line 37
# Spin off a detached background process that refreshes the cache file.
#
# The child's STDOUT is redirected into a pipe so a shell `cd` function
# wrapping pro does not block waiting on the fork's output. The parent
# now closes its copies of both pipe ends after forking — the old code
# leaked two file descriptors per call.
def run_index_process
  readme, writeme = IO.pipe
  p1 = fork {
    # Stop cd function from blocking on fork
    STDOUT.reopen(writeme)
    readme.close
    # skip the work if another indexer already holds the lock
    # (File.exist? replaces File.exists?, removed in Ruby 3.2)
    index_process unless File.exist?(INDEXER_LOCK_PATH)
  }
  # parent: release its pipe ends and reap the child asynchronously
  readme.close
  writeme.close
  Process.detach(p1)
end
#scan_bases ⇒ Object
add all git repos in all bases to the index
86 87 88 89 90 91 92 |
# File 'lib/pro/indexer.rb', line 86
# Add all git repos in all bases to the index.
# Returns a hash mapping each base directory to its array of Repo objects.
def scan_bases
  bases = {}
  @base_dirs.each do |base|
    bases[base] = index_repos(base)
  end
  bases
end