# File 'lib/osdn/cli/command/frs_upload.rb', line 57
def run
  update_token
  opts = GetoptLong.new(
    [ '--dry-run',    '-n', GetoptLong::NO_ARGUMENT ],
    [ '--project',    '-p', GetoptLong::REQUIRED_ARGUMENT ],
    [ '--release',    '-r', GetoptLong::REQUIRED_ARGUMENT ],
    [ '--visibility', '-v', GetoptLong::REQUIRED_ARGUMENT ],
  )
  opts.each do |opt, arg|
    case opt
    when '--project'
      @target_proj = arg unless arg.empty?
    when '--visibility'
      unless %w(public private hidden).member?(arg)
        logger.fatal "Invalid visibility status: #{arg}"
        exit
      end
      @visibility = arg
    when '--dry-run'
      @dry_run = true
    end
  end

  @target_dir = Pathname.new(ARGV.shift || '.')
  proj_info = api.get_project target_proj

  # Each direct subdirectory of the target directory corresponds to a package.
  Pathname.glob(@target_dir + '*').each do |pdir|
    unless load_variables(pdir).package_id
      logger.info "Creating new package '#{pdir.basename}'"
      if @dry_run
        pinfo = Hashie::Mash.new id: '(dry-run)', name: pdir.basename, url: '(dry-run)'
      else
        pinfo = api.create_package target_proj, pdir.basename, visibility: @visibility
        update_variables pdir, package_id: pinfo.id
      end
      $stdout.puts "New package '#{pinfo.name}' has been created; #{pinfo.url}"
    end

    # Each subdirectory of a package directory corresponds to a release.
    Pathname.glob(pdir + '*').each do |rdir|
      vars = load_variables(rdir)
      rinfo = nil
      if vars.release_id
        rinfo = api.get_release target_proj, target_package(rdir), target_release(rdir)
      else
        logger.info "Creating new release '#{rdir.basename}'"
        if @dry_run
          rinfo = Hashie::Mash.new id: '(dry-run)', name: rdir.basename, url: '(dry-run)', files: []
        else
          rinfo = api.create_release target_proj, target_package(rdir), rdir.basename, visibility: @visibility
          update_variables rdir, release_id: rinfo.id
        end
        $stdout.puts "New release '#{rinfo.name}' has been created; #{rinfo.url}"
      end

      # Upload every regular file found in the release directory.
      Pathname.glob(rdir + '*').each do |file|
        if file.directory?
          logger.error "Skipping directory #{file}"
          next
        end
        logger.debug "Calculating digest for #{file}..."
        digests = {
          sha256: hexdigest(Digest::SHA256, file),
          sha1:   hexdigest(Digest::SHA1, file),
          md5:    hexdigest(Digest::MD5, file),
        }
        if remote_f = rinfo.files.find { |f| f.name == file.basename.to_s }
          # The file already exists on the remote release; only verify its digests.
          if digests.find { |type, dig| dig != remote_f.send("digest_#{type}") }
            logger.error "#{file} differs from the remote file! Please delete the remote file before uploading a new one."
          end
          logger.warn "Skipping already uploaded file '#{file}'"
        else
          logger.info "Uploading file #{file} (#{file.size} bytes)"
          if @dry_run
            finfo = Hashie::Mash.new id: '(dry-run)', url: '(dry-run)'
          else
            # Show upload progress when the log level is INFO or more verbose.
            self.class._show_progress = true if logger.level <= Logger::INFO
            fio = file.open
            logger.info "Starting upload #{file}..."
            finfo = api.create_release_file target_proj, target_package(rdir), target_release(rdir), fio, visibility: @visibility
            fio.close
            self.class._show_progress = false
            # Compare local and remote digests to detect a corrupted upload.
            if digests.find { |type, dig| dig != finfo.send("digest_#{type}") }
              logger.error "File digests do not match! The uploaded file #{file} may be broken! Please check."
            else
              logger.info "Upload completed."
            end
          end
          $stdout.puts "New file '#{file}' has been uploaded; #{finfo.url}"
        end
      end
    end
  end
end