Module: OpenTox::Application::Helpers

Defined in:
helper.rb,
helper_isatab.rb,
helper_unformatted.rb


Instance Method Details

#cache ⇒ String

Returns the absolute path of the investigation cache directory

Returns:

  • (String)

    the absolute path of the investigation cache directory


# File 'helper.rb', line 50

def cache
  File.join dir,"cache"
end

#check_get_access(result) ⇒ Object


# File 'helper.rb', line 272

def check_get_access result
  if get_pi == "protocol_service"
    return result
  end
  result.empty? ? (bad_request_error "Nothing received from backend.") : (out = JSON.parse(result))
  getaccess = getaccess_uris
  unless getaccess.empty?
    out["results"]["bindings"].delete_if{|i| !getaccess.include?(i["investigation"]["value"])}
  else
    out["results"]["bindings"].clear
  end
  return JSON.pretty_generate(out)
end

#clean_params(type) ⇒ Object

validate and clean up request parameters


# File 'helper_unformatted.rb', line 9

def clean_params type
  inv_types = ['noData', 'unformattedData', 'ftpData']
  param_types = ['owningPro', 'title', 'abstract', 'owningOrg', 'authors', 'keywords', 'ftpFile']
  param_types.delete("ftpFile") if type == "noftp"
  param_uris = {:owningPro => params[:owningPro], :owningOrg => params[:owningOrg], :authors => params[:authors], :keywords => params[:keywords]}
  bad_request_error "Investigation type '#{params[:type]}' not supported." unless inv_types.include? params[:type]
  param_types.delete("owningPro")
  param_types.each{|p| bad_request_error "Parameter '#{p}' is required." if params[p.to_sym].blank?}
  bad_request_error "Parameter 'owningOrg' requires single entry." if (params[:owningOrg].split(",").size > 1)
  param_uris.delete_if{|k, v| v.nil? }
  param_uris.each{|key, value| value.gsub(/,\s/, ",").split(",").each{|v| validate_params_uri(key, v) ? next : (bad_request_error "'#{v}' is not a valid URI.")}}
end
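
A minimal sketch of request parameters that would pass this validation for an unformatted-data upload; all URIs below are hypothetical placeholders, not real ToxBank entries. Passing "noftp" as the argument drops the ftpFile requirement.

# hypothetical request parameters (sketch); URIs are placeholders
params = {
  :type      => "unformattedData",
  :title     => "Example study",
  :abstract  => "Short abstract text.",
  :owningOrg => "https://services.example.org/toxbank/organisation/G1",
  :owningPro => "https://services.example.org/toxbank/project/G2",
  :authors   => "https://services.example.org/toxbank/user/U1",
  :keywords  => "https://services.example.org/toxbank/keyword/K1",
  :ftpFile   => "mydata/raw.zip"
}
clean_params "ftp"   # raises bad_request_error for missing or invalid entries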

#create_cache ⇒ Object


# File 'helper.rb', line 59

def create_cache
  FileUtils.mkdir_p cache
  Dir.chdir cache
  # empty JSON object
  string = "{\n  \"head\": {\n    \"vars\": [\n      \"biosample\",\n      \"sample\",\n      \"factorname\",\n      \"value\",\n      \"ontouri\",\n      \"unitOnto\",\n      \"unit\",\n      \"unitID\",\n      \"characteristics\"\n    ]\n  },\n  \"results\": {\n    \"bindings\": [\n\n    ]\n  }\n}"
  File.open(File.join(dashboard), 'w') {|f| f.write(string) }
end

#create_policy(ldaptype, uristring) ⇒ Object

creates XML policy file for user or group

Parameters:

  • ldaptype (String)

    is 'user' or 'group'

  • uristring (String)

    URI of user/group in user service

See Also:


# File 'helper.rb', line 92

def create_policy ldaptype, uristring
  filename = File.join(dir, "#{ldaptype}_policies")
  policyfile = File.open(filename,"w")
  uriarray = uristring if uristring.class == Array
  uriarray = uristring.gsub(/[\[\]\"]/ , "").split(",") if uristring.class == String
  if uriarray.size > 0
    uriarray.each do |u|
      tbaccount = OpenTox::TBAccount.new(u)
      policyfile.puts tbaccount.get_policy(investigation_uri)
    end
    policyfile.close
    policytext = File.read filename
    replace = policytext.gsub!("</Policies>\n<!DOCTYPE Policies PUBLIC \"-//Sun Java System Access Manager7.1 2006Q3 Admin CLI DTD//EN\" \"jar://com/sun/identity/policy/policyAdmin.dtd\">\n<Policies>\n", "")
    File.open(filename, "w") { |file| file.puts replace } if replace
    Authorization.reset_policies investigation_uri, ldaptype
    ret = Authorization.create_policy(File.read(policyfile))
    File.delete policyfile if ret
  else
    Authorization.reset_policies investigation_uri, ldaptype
  end
end
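
An illustrative call with hypothetical account URIs; uristring may be a String (as received from a request) or an Array:

# hypothetical user and group URIs (placeholders)
create_policy "user",  '["https://services.example.org/toxbank/user/U2"]'
create_policy "group", '["https://services.example.org/toxbank/project/G176"]'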

#dashboard ⇒ String

Returns the absolute path of the investigation dashboard file

Returns:

  • (String)

    the absolute path of the investigation dashboard file


# File 'helper.rb', line 55

def dashboard
  File.join cache, "dashboard"
end
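
Taken together, the file and directory helpers nest as follows (sketch, assuming params[:id] == "123"; the service root path is abbreviated):

dir        #=> ".../investigation/123"
tmp        #=> ".../investigation/123/tmp"
cache      #=> ".../investigation/123/cache"
dashboard  #=> ".../investigation/123/cache/dashboard"
nt         #=> "123.nt"
file       #=> ".../investigation/123/<params[:filename]>"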

#dashboard_cache ⇒ Object

create dashboard cache


# File 'helper_isatab.rb', line 123

def dashboard_cache
  templates = get_templates "investigation"
  sparqlstring = File.read(templates["factorvalues_by_investigation"]) % { :investigation_uri => investigation_uri }
  factorvalues = OpenTox::Backend::FourStore.query sparqlstring, "application/json"
  @result = JSON.parse(factorvalues)
  bindings = @result["results"]["bindings"]
  unless bindings.blank?
    # init arrays; a = by sample_uri; b = compare samples; c = uniq result
    a = []; b = []; c = []
    bindings.each{|b| a << bindings.map{|x| x if x["sample"]["value"] == b["sample"]["value"]}.compact }
    # compare and uniq sample [compound, dose, time]
    a.each do |sample|
      @collected_values = []
      sample.each do |s|
        compound = s["value"]["value"] if s["factorname"]["value"] =~ /compound/i
        dose = s["value"]["value"] if s["factorname"]["value"] =~ /dose/i
        time = s["value"]["value"] if s["factorname"]["value"] =~ /time/i
        @collected_values << [compound, dose, time]
      end
      collected_values = @collected_values.flatten.compact
      if !b.include?(collected_values)
        b << collected_values
        c << sample
      end
    end
    # clear original bindings
    @result["results"]["bindings"].clear
    # add new bindings
    @result["results"]["bindings"] = c.flatten!
    
    # add biosample characteristics
    biosamples = @result["results"]["bindings"].map{|n| n["biosample"]["value"]}
    # add new JSON head
    @result["head"]["vars"] << "characteristics"
    biosamples.uniq.each do |biosample|
      sparqlstring = File.read(templates["characteristics_by_sample"]) % { :sample_uri => biosample }
      sample = OpenTox::Backend::FourStore.query sparqlstring, "application/json"
      result = JSON.parse(sample)
      # adding single biosample characteristics to JSON array
      @result["results"]["bindings"].find{|n| n["characteristics"] = result["results"]["bindings"] if n["biosample"]["value"].to_s == biosample.to_s }
    end
    # add sample characteristics
    samples = @result["results"]["bindings"].map{|n| n["sample"]["value"]}
    # add new JSON head
    @result["head"]["vars"] << "sampleChar"
    samples.uniq.each do |sample|
      sparqlstring = File.read(templates["characteristics_by_sample"]) % { :sample_uri => sample }
      response = OpenTox::Backend::FourStore.query sparqlstring, "application/json"
      result = JSON.parse(response)
      # adding single sample characteristics to JSON array
      @result["results"]["bindings"].find{|n| n["sampleChar"] = result["results"]["bindings"] if n["sample"]["value"].to_s == sample.to_s}
    end
    @result["results"]["bindings"].each{|n| n["characteristics"] ||= [] }
    @result["results"]["bindings"].each{|n| n["sampleChar"] ||= [] }
    # result to JSON
    result = JSON.pretty_generate(@result)
    # write result to dashboard_file
    replace_cache result
  else
    $logger.error "Unable to create dashboard file for investigation #{params[:id]}"
  end
end

#delete_investigation_policy ⇒ Object

deletes all policies of an investigation


# File 'helper.rb', line 82

def delete_investigation_policy
  if RestClientWrapper.subjectid and !File.exists?(dir) and investigation_uri
    res = OpenTox::Authorization.delete_policies_from_uri(investigation_uri)
  end
end

#dir ⇒ String

Returns absolute investigation dir path

Returns:

  • (String)

    absolute investigation dir path


# File 'helper.rb', line 30

def dir
  File.join File.dirname(File.expand_path __FILE__), "investigation", params[:id].to_s
end

#extract_zip ⇒ Object

extract zip upload to tmp subdirectory of investigation


# File 'helper_isatab.rb', line 34

def extract_zip
  unless `jar -tvf '#{File.join(tmp,params[:file][:filename])}'`.to_i == 0
    `unzip -o '#{File.join(tmp,params[:file][:filename])}'  -x '__MACOSX/*' -d #{tmp}`
    Dir["#{tmp}/*"].collect{|d| d if File.directory?(d)}.compact.each  do |d|
      `mv #{d}/* #{tmp}`
      `rmdir #{d}`
    end
  else
    FileUtils.remove_entry dir
    bad_request_error "Could not parse isatab file. Empty directory submitted."
  end
  replace_pi
end

#file ⇒ String

Returns file name with absolute path

Returns:

  • (String)

    file name with absolute path


# File 'helper.rb', line 40

def file
  File.join dir, params[:filename]
end

#get_cache ⇒ Object


# File 'helper.rb', line 67

def get_cache
  File.exist?(File.join dashboard) ? IO.read(File.join dashboard) : dashboard_cache
end

#get_datafiles ⇒ Array

get an array of data files in an investigation

Returns:

  • (Array)

    investigation data file names


# File 'helper.rb', line 233

def get_datafiles
  response = OpenTox::RestClientWrapper.get "#{investigation_uri}/sparql/files_with_datanodes_by_investigation", {}, {:accept => "application/json"}
  result = JSON.parse(response)
  files = result["results"]["bindings"].map{|n| "#{n["file"]["value"]}"}
  datanodes = result["results"]["bindings"].map{|n| "#{n["datanode"]["value"]}"}
  @datahash = {}
  result["results"]["bindings"].each{ |f| @datahash[(f["file"]["value"]).gsub(/(ftp:\/\/|)#{URI($investigation[:uri]).host}\//,"")] = ["#{f["datanode"]["value"]}"] }
  return files.flatten
end

#get_ftpfiles ⇒ Object

get a hash of files in the FTP folder of a user (relative path => file name); returns an empty array if the folder does not exist


# File 'helper.rb', line 245

def get_ftpfiles
  user = Authorization.get_user
  return [] if  !Dir.exists?("/home/ftpusers/#{user}") || user.nil?
  files = Dir.chdir("/home/ftpusers/#{user}") { Dir.glob("**/*").map{|path| File.expand_path(path) } }.reject{ |p| File.directory? p }
  Hash[files.collect { |f| [f.gsub("/home/ftpusers/#{user}/",""), File.basename(f)] }]
end
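
The return value maps paths relative to the user's FTP home to their base names, e.g. (file names made up for illustration):

get_ftpfiles
#=> {"study1/raw_data.zip" => "raw_data.zip", "readme.txt" => "readme.txt"}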

#get_permission ⇒ Object

manage GET requests with policies and flags.


# File 'helper.rb', line 175

def get_permission
  return false if request.env['REQUEST_METHOD'] != "GET"
  uri = to(request.env['REQUEST_URI'])
  curi = clean_uri(uri)
  return true if uri == $investigation[:uri]
  return true if OpenTox::Authorization.get_user == "protocol_service"
  return true if OpenTox::Authorization.uri_owner?(curi)
  if (request.env['REQUEST_URI'] =~ /investigation\/sparql/ || request.env['REQUEST_URI'] =~ /investigation\/ftpfiles/) # give permission to user groups defined in policies
    return true if OpenTox::Authorization.authorized?("#{$investigation[:uri]}", "GET")
  end
  if (request.env['REQUEST_URI'] =~ /metadata/ ) || (request.env['REQUEST_URI'] =~ /protocol/ )
    return true if qfilter("isSummarySearchable", curi) =~ /#{curi}/ && qfilter("isPublished", curi) =~ /#{curi}/
  end
  return true if OpenTox::Authorization.authorized?(curi, "GET") && qfilter("isPublished", curi) =~ /#{curi}/
  return false
end

#get_pi ⇒ Object

get the account URI of the current user from the user service


# File 'helper_unformatted.rb', line 32

def get_pi
  user = OpenTox::Authorization.get_user
  accounturi = `curl -Lk -X GET -H "Accept:text/uri-list" -H "subjectid:#{RestClientWrapper.subjectid}" #{$user_service[:uri]}/user?username=#{user}`.chomp.sub("\n","")
  accounturi
end

#get_templates(type = "") ⇒ Object

get SPARQL template hash of templatename => templatefile

Parameters:

  • type (String) (defaults to: "")

    template subdirectory


# File 'helper.rb', line 223

def get_templates type=""
  templates = {}
  filenames = Dir[File.join File.dirname(File.expand_path __FILE__), "template/#{type}/*.sparql".gsub("//","/")]
  filenames.each{ |filename| templates[File.basename(filename, ".sparql")]=filename}
  return templates
end
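
A sketch of the returned hash for the "investigation" subdirectory (paths abbreviated); the keys shown are the templates used by #dashboard_cache:

get_templates "investigation"
#=> {"factorvalues_by_investigation" => ".../template/investigation/factorvalues_by_investigation.sparql",
#    "characteristics_by_sample"     => ".../template/investigation/characteristics_by_sample.sparql"}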

#get_timestamp(timestring) ⇒ Integer

Returns timestamp of a time string

Returns:

  • (Integer)

    timestamp of a time string


# File 'helper.rb', line 77

def get_timestamp timestring
  Time.parse(timestring).to_i
end
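
For example:

get_timestamp "04 Jul 2016 12:00:00 UTC"  #=> 1467633600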

#getaccess_uris ⇒ Object

URIs with GET permission for the current user


# File 'helper.rb', line 261

def getaccess_uris
  a = Time.now
  out = []
  uri_list.split("\n").each do |u|
    out << u if OpenTox::Authorization.authorized?(u, "GET")
  end
  b = Time.now
  $logger.debug "Duration to get biosearch uris: #{(b-a).round(3)}\n#{out}"
  out
end

#investigation_type ⇒ Object

get non-isatab investigation type


# File 'helper.rb', line 168

def investigation_type
  response = OpenTox::Backend::FourStore.query "SELECT ?o FROM <#{uri}> WHERE {?s <#{RDF::TB}hasInvType> ?o}", "application/json"
  result = JSON.parse(response)
  type = result["results"]["bindings"].map{|n| n["o"]["value"]}[0]
end

#investigation_uri ⇒ String

Returns the full investigation URI: investigation service URI + investigation ID

Returns:

  • (String)

    the full investigation URI: investigation service URI + investigation ID


# File 'helper.rb', line 9

def investigation_uri
  to("/investigation/#{params[:id]}") # new in Sinatra, replaces url_for
end
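
For example, assuming the service is mounted at a hypothetical host and params[:id] == "123":

investigation_uri  #=> "https://services.example.org/investigation/123"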

#is_isatab? ⇒ Boolean

check the investigation type; returns true if no hasInvType triple is present (i.e. an ISA-TAB investigation)

Returns:

  • (Boolean)

# File 'helper_isatab.rb', line 8

def is_isatab?
  response = OpenTox::Backend::FourStore.query "SELECT ?o WHERE {<#{investigation_uri}> <#{RDF::TB}hasInvType> ?o}", "application/json"
  result = JSON.parse(response)
  type = result["results"]["bindings"].map {|n|  "#{n["o"]["value"]}"}
  type.blank? ? (return true) : (return false)
end

#isa2rdf ⇒ Object

ISA-TAB to RDF conversion. Preprocesses and parses ISA-TAB files with the Java isa2rdf tool


# File 'helper_isatab.rb', line 51

def isa2rdf
  # @note isa2rdf returns correct exit code but error in task
  `cd #{File.dirname(__FILE__)}/java && java -jar -Xmx2048m isa2rdf-cli-1.0.2.jar -d #{tmp} -i #{investigation_uri} -o #{File.join tmp,nt} -t #{$user_service[:uri]} 2> #{File.join tmp,'log'} &`
  if !File.exists?(File.join tmp, nt)
    out = IO.read(File.join tmp, 'log') 
    FileUtils.remove_entry dir
    delete_investigation_policy
    bad_request_error "Could not parse isatab file in '#{params[:file][:filename]}'. Message is:\n #{out}"
  else
    `sed -i 's;http://onto.toxbank.net/isa/tmp/;#{investigation_uri}/;g' #{File.join tmp,nt}`
    investigation_id = `grep "#{investigation_uri}/I[0-9]" #{File.join tmp,nt}|cut -f1 -d ' '`.strip
    `sed -i 's;#{investigation_id.split.last};<#{investigation_uri}>;g' #{File.join tmp,nt}`
    `echo "<#{investigation_uri}> <#{RDF.type}> <#{RDF::OT.Investigation}> ." >>  #{File.join tmp,nt}`
    FileUtils.rm Dir[File.join(tmp,"*.zip")]
    FileUtils.cp Dir[File.join(tmp,"*")], dir
    FileUtils.remove_entry tmp

    # create dashboard cache and empty JSON object
    create_cache

    # next line moved to l.74
    `zip -j #{File.join(dir, "investigation_#{params[:id]}.zip")} #{dir}/*.txt`
    OpenTox::Backend::FourStore.put investigation_uri, File.read(File.join(dir,nt)), "application/x-turtle"
    
    task = OpenTox::Task.run("Processing raw data",investigation_uri) do
      sleep 30 # wait until metadata imported and preview requested
      `cd #{File.dirname(__FILE__)}/java && java -jar -Xmx2048m isa2rdf-cli-1.0.2.jar -d #{dir} -i #{investigation_uri} -a #{File.join dir} -o #{File.join dir,nt} -t #{$user_service[:uri]} 2> #{File.join dir,'log'} &`
      # get rdfs
      sleep 10 # wait until first file is generated
      rdfs = Dir["#{dir}/*.rdf"]
      $logger.debug "rdfs:\t#{rdfs}\n"
      unless rdfs.blank?
        sleep 1
        rdfs = Dir["#{dir}/*.rdf"].reject!{|rdf| rdf.blank?}
      else
        # get ntriples datafiles
        datafiles = Dir["#{dir}/*.nt"].reject!{|file| file =~ /#{nt}$|ftpfiles\.nt$|modified\.nt$|isPublished\.nt$|isSummarySearchable\.nt/}
        $logger.debug "datafiles:\t#{datafiles}"
        unless datafiles.blank?
          # split extra datasets
          datafiles.each{|dataset| `split -a 4 -d -l 100000 '#{dataset}' '#{dataset}_'` unless File.zero?(dataset)}
          chunkfiles = Dir["#{dir}/*.nt_*"]
          $logger.debug "chunkfiles:\t#{chunkfiles}"
          
          # append datasets to investigation graph
          chunkfiles.sort{|a,b| a <=> b}.each do |dataset|
            OpenTox::Backend::FourStore.post investigation_uri, File.read(dataset), "application/x-turtle"
            sleep 10 # time it takes to import and reindex
            set_modified
            File.delete(dataset)
          end
        end # datafiles
      end # rdfs

      # update JSON object with dashboard values
      dashboard_cache
      link_ftpfiles
      # remove subtask uri from metadata
      OpenTox::Backend::FourStore.update "WITH <#{investigation_uri}>
      DELETE { <#{investigation_uri}> <#{RDF::TB.hasSubTaskURI}> ?o}
      WHERE {<#{investigation_uri}> <#{RDF::TB.hasSubTaskURI}> ?o}"
      set_modified
      investigation_uri # result uri for subtask
    end # task
    # update metadata with subtask uri
    triplestring = "<#{investigation_uri}> <#{RDF::TB.hasSubTaskURI}> <#{task.uri}> ."
    OpenTox::Backend::FourStore.post investigation_uri, triplestring, "application/x-turtle"
    investigation_uri
  end
end

#kill_isa2rdf ⇒ Object

kill isa2rdf processes on DELETE or PUT requests


# File 'helper_isatab.rb', line 16

def kill_isa2rdf
  pid = []
  pid << `ps x|grep #{params[:id]}|grep java|grep -v grep|awk '{ print $1 }'`.split("\n")
  $logger.debug "isa2rdf PIDs for current investigation:\t#{pid.flatten}\n"
  pid.flatten.each{|p| `kill #{p.to_i}`} unless pid.blank?
end

#link_ftpfiles ⇒ Object

link data files from FTP to investigation dir


# File 'helper_isatab.rb', line 188

def link_ftpfiles
  ftpfiles = get_ftpfiles
  datafiles = get_datafiles
  return "" if ftpfiles.empty? || datafiles.empty?
  remove_symlinks
  datafiles = datafiles.collect { |f| f.gsub(/(ftp:\/\/|)#{URI($investigation[:uri]).host}\//,"") }
  tolink = (ftpfiles.keys & ( datafiles - Dir.entries(dir).reject{|entry| entry =~ /^\.{1,2}$/}))
  tolink.each do |file|
    `ln -s "/home/ftpusers/#{Authorization.get_user}/#{file}" "#{dir}/#{file.gsub("/","_")}"`
    @datahash[file].each do |data_node|
      OpenTox::Backend::FourStore.update "INSERT DATA { GRAPH <#{investigation_uri}> {<#{data_node}> <#{RDF::ISA.hasDownload}> <#{investigation_uri}/files/#{file.gsub("/","_")}>}}"
      ftpfilesave = "<#{data_node}> <#{RDF::ISA.hasDownload}> <#{investigation_uri}/files/#{file.gsub("/","_")}> ."
      File.open(File.join(dir, "ftpfiles.nt"), 'a') {|f| f.write("#{ftpfilesave}\n") }
    end
  end
  return tolink
end

#link_ftpfiles_by_params ⇒ Object

link ftp files by params


# File 'helper_unformatted.rb', line 95

def link_ftpfiles_by_params
  ftpfiles = get_ftpfiles
  paramfiles = params[:ftpFile].gsub(/,\s/, ",").split(",")
  # remove existing from dir
  remove_symlinks
  paramfiles.each do |file|
    bad_request_error "'#{file}' is missing. Please upload to your ftp directory first." if !ftpfiles.include?(file)
    `ln -s "/home/ftpusers/#{Authorization.get_user}/#{file}" "#{dir}/#{file.gsub("/","_")}"` unless File.exists?("#{dir}/#{file}") 
    ftpfilesave = "<#{investigation_uri}> <#{RDF::ISA.hasDownload}> <#{investigation_uri}/files/#{file.gsub("/","_")}> ."
    File.open(File.join(dir, "ftpfiles.nt"), 'a') {|f| f.write("#{ftpfilesave}\n") }
    # update backend
    OpenTox::Backend::FourStore.update "WITH <#{investigation_uri}>
    DELETE { <#{investigation_uri}> <#{RDF::ISA.hasDownload}> ?o} WHERE {<#{investigation_uri}> <#{RDF::ISA.hasDownload}> ?o};
    INSERT DATA { GRAPH <#{investigation_uri}> {<#{investigation_uri}> <#{RDF::ISA.hasDownload}> <#{investigation_uri}/files/#{file.gsub("/","_")}>}}"
  end
end

#nt ⇒ String

Returns N-Triples file name

Returns:

  • (String)

    N-Triples file name


# File 'helper.rb', line 45

def nt
  "#{params[:id]}.nt"
end

#params2rdf ⇒ Object

Parameters to RDF conversion.


# File 'helper_unformatted.rb', line 39

def params2rdf
  #$logger.debug params.inspect
  FileUtils.cp(File.join(File.dirname(File.expand_path __FILE__), "template", "metadata.nt"), File.join(tmp,nt))
  # build the N-Triples metadata string from the template
  metadata = File.read(File.join(tmp,nt)) % {:investigation_uri => investigation_uri,
    :type => params[:type],
    :title => params[:title].strip.gsub("\"", "\\\""),
    :abstract => params[:abstract].strip.gsub("\"", "\\\"").gsub(/\r\n/,"\\n"), # catch ^M character
    :organisation => params[:owningOrg],
    :pi => get_pi
  }
  if request.env['REQUEST_METHOD'] =~ /POST/
    metadata << "<#{investigation_uri}> <http://purl.org/dc/terms/issued> \"#{Time.new.strftime("%d %b %Y %H:%M:%S %Z")}\" .\n"
  else
    issued = ""
    IO.readlines(File.join(dir,nt)).each{|l| issued << l if l =~ /issued/}
    metadata << issued
  end
  # params that can hold several comma-separated values
  unless params[:owningPro].nil?
    owningPro = params[:owningPro].split(",")
    owningPro.each do |project|
      metadata << "<#{investigation_uri}> <#{RDF::TB}hasProject> <#{project.strip}> .\n"
    end
  end
  authors = params[:authors].split(",")
  authors.each do |author|
    metadata << "<#{investigation_uri}> <#{RDF::TB}hasAuthor> <#{author.strip}> .\n"
  end
  keywords = params[:keywords].split(",")
  keywords.each do |keyword|
    metadata << "<#{investigation_uri}> <#{RDF::TB}hasKeyword> <#{keyword.strip}> .\n"
  end
  unless params[:licenses].nil?
    metadata << "<#{investigation_uri}> <http://purl.org/dc/terms/license> \"#{params[:licenses].strip.gsub("\"", "\\\"").gsub(/\r\n/,"\\n")}\"^^<http://www.w3.org/2001/XMLSchema#string> .\n"
  end
  if params[:file]
    metadata << "<#{investigation_uri}> <#{RDF::TB}hasDownload> <#{investigation_uri}/files/#{params[:file][:filename].gsub(/\s/, "%20")}> .\n"
  end
  if params[:ftpFile]
    ftpData = params[:ftpFile].split(",")
    ftpData.each do |ftp|
      metadata << "<#{investigation_uri}> <#{RDF::TB}hasDownload> <#{investigation_uri}/files/#{ftp.strip.gsub(/\s/, "%20")}> .\n"
    end
    link_ftpfiles_by_params
  else
    remove_symlinks
  end

  File.open(File.join(tmp,nt), 'w'){|f| f.write(metadata)}
  FileUtils.cp Dir[File.join(tmp,"*")], dir
  FileUtils.remove_entry tmp
  OpenTox::Backend::FourStore.put investigation_uri, File.read(File.join(dir,nt)), "application/x-turtle"
  investigation_uri
end

#prepare_upload ⇒ Object

copy investigation files into the tmp subfolder


# File 'helper_isatab.rb', line 24

def prepare_upload
  locked_error "Processing investigation #{params[:id]}. Please try again later." if File.exists? tmp
  bad_request_error "Please submit data as multipart/form-data" unless request.form_data? 
  # move existing ISA-TAB files to tmp
  FileUtils.mkdir_p tmp
  FileUtils.cp Dir[File.join(dir,"*.txt")], tmp if params[:file]
  FileUtils.cp params[:file][:tempfile], File.join(tmp, params[:file][:filename]) if params[:file]
end

#qfilter(flag, uri) ⇒ String

return uri if related flag is set to “true”.

Returns:

  • (String)

    URI


# File 'helper.rb', line 161

def qfilter(flag, uri)
  qfilter = OpenTox::Backend::FourStore.query "SELECT ?s FROM <#{uri}> WHERE {?s <#{RDF::TB}#{flag}> ?o FILTER regex(?o, 'true', 'i')}", "application/sparql-results+xml"
  $logger.debug "\ncheck flags: #{flag}:\t#{qfilter.split("\n")[7].gsub(/<binding name="s"><uri>|\/<\/uri><\/binding>/, '').strip}\n"
  qfilter.split("\n")[7].gsub(/<binding name="s"><uri>|\/<\/uri><\/binding>/, '').strip
end
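
An illustrative use, mirroring #get_permission: the result contains the investigation URI only when the flag is set to "true", so callers test it with a regexp match (the URI is a placeholder):

curi = "https://services.example.org/investigation/123"
published = qfilter("isPublished", curi) =~ /#{curi}/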

#qlist(mime_type) ⇒ Object

generate URI list.


# File 'helper.rb', line 193

def qlist mime_type
  list = OpenTox::Backend::FourStore.list mime_type
  service_uri = to("/investigation")
  list.split.keep_if{|v| v =~ /#{service_uri}/}.join("\n")# show all, ignore flags
end

#remove_symlinks ⇒ Object

remove symlinks in investigation dir


# File 'helper.rb', line 253

def remove_symlinks
  Dir["#{tmp}/*"].each{|file| FileUtils.rm(file) if File.symlink?("#{dir}/#{File.basename(file)}")}
  Dir["#{dir}/*"].each{|file| FileUtils.rm(file) if File.symlink?("#{dir}/#{File.basename(file)}")}
  FileUtils.rm(File.join(dir, "ftpfiles.nt")) if File.exists? File.join(dir, "ftpfiles.nt")
end

#replace_cache(string) ⇒ Object


# File 'helper.rb', line 71

def replace_cache string
  File.open(File.join(dashboard), 'w') {|f| f.write(string) }
end

#replace_pi ⇒ Object

replaces the PI URI with the owner URI (using the URI prefix) in the i_*.txt investigation files.


# File 'helper.rb', line 200

def replace_pi
  begin
    user = OpenTox::Authorization.get_user
    #accounturi = OpenTox::RestClientWrapper.get("#{$user_service[:uri]}/user?username=#{user}", nil, {:Accept => "text/uri-list", :subjectid => subjectid}).sub("\n","")
    accounturi = `curl -Lk -X GET -H "Accept:text/uri-list" -H "subjectid:#{RestClientWrapper.subjectid}" #{$user_service[:uri]}/user?username=#{user}`.chomp.sub("\n","")
    account = OpenTox::TBAccount.new(accounturi)
    investigation_file = Dir["#{tmp}/i_*.txt"]
    investigation_file.each do |inv_file|
      text = File.read(inv_file, :encoding => "BINARY")
      #replace = text.gsub!(/TBU:U\d+/, account.ns_uri)
      #replace = text.gsub!(/Comment \[Principal Investigator URI\]\t"TBU:U\d+"/ , "Comment \[Owner URI\]\t\"#{account.ns_uri}\"")
      #replace = text.gsub!(/Comment \[Owner URI\]\t"TBU:U\d+"/ , "Comment \[Owner URI\]\t\"#{account.ns_uri}\"")
      replace = text.gsub!(/Comment \[Principal Investigator URI\]\t.*/ , "Comment \[Owner URI\]\t\"#{account.ns_uri}\"")
      replace = text.gsub!(/Comment \[Owner URI\]\t.*/ , "Comment \[Owner URI\]\t\"#{account.ns_uri}\"")
      File.open(inv_file, "wb") { |file| file.puts replace } if replace
    end
  rescue
    $logger.error "can not replace Principal Investigator to user: #{user} with subjectid: #{RestClientWrapper.subjectid}"
  end
end

#set_flag(flag, value, type = "") ⇒ Object

switch boolean flags in triple store

Parameters:

  • flag (String)

    e.g. RDF::TB.isPublished, RDF::TB.isSummarySearchable

  • value (Boolean)
  • type (String) (defaults to: "")

    pass "boolean" to type the flag value as xsd:boolean


# File 'helper.rb', line 118

def set_flag flag, value, type = ""
  flagtype = type == "boolean" ? "^^<#{RDF::XSD.boolean}>" : ""
  OpenTox::Backend::FourStore.update "DELETE DATA { GRAPH <#{investigation_uri}> {<#{investigation_uri}> <#{flag}> \"#{!value}\"#{flagtype}}}"
  OpenTox::Backend::FourStore.update "INSERT DATA { GRAPH <#{investigation_uri}> {<#{investigation_uri}> <#{flag}> \"#{value}\"#{flagtype}}}"
  # save flag to file in case of restore or transport backend
  flagsave = "<#{investigation_uri}> <#{flag}> \"#{value}\"#{flagtype} ."
  File.open(File.join(dir, "#{flag.to_s.split("/").last}.nt"), 'w') {|f| f.write(flagsave) }
  newfiles = `cd #{File.dirname(__FILE__)}/investigation; git ls-files -z --others --exclude-standard --directory #{params[:id]}`
  request.env['REQUEST_METHOD'] == "POST" ? action = "created" : action = "modified"
  if newfiles != ""
    newfiles.split("\0").each{|newfile|`cd #{File.dirname(__FILE__)}/investigation && git add "#{newfile}"`}
    `cd #{File.dirname(__FILE__)}/investigation && git commit --allow-empty -am "#{newfiles.gsub("\0"," ::: ")}  #{action} by #{OpenTox::Authorization.get_user}"`
  else
    `cd #{File.dirname(__FILE__)}/investigation && git add "#{params[:id]}/#{flag.to_s.split("/").last}.nt" && git commit --allow-empty -am "#{params[:id]}/#{flag.to_s.split("/").last}.nt  #{action} by #{OpenTox::Authorization.get_user}"` if `cd #{File.dirname(__FILE__)}/investigation && git status -s| cut -c 4-` != ""
  end
end
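
An illustrative call, marking an investigation as published with a typed boolean literal:

set_flag RDF::TB.isPublished, true, "boolean"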

#set_index(inout = false) ⇒ Object

add or delete investigation_uri from the search index of the UI

Parameters:

  • inout (Boolean) (defaults to: false)

    true=add, false=delete


# File 'helper.rb', line 155

def set_index inout=false
  OpenTox::RestClientWrapper.method(inout ? "put" : "delete").call "#{$search_service[:uri]}/search/index/investigation?resourceUri=#{CGI.escape(investigation_uri)}",{},{:subjectid => OpenTox::RestClientWrapper.subjectid}
end
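
For example:

set_index true   # add the investigation to the search index
set_index        # default false: remove it from the index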

#set_modified ⇒ Object

delete all RDF::DC.modified triples and insert new one with current date-time


# File 'helper.rb', line 136

def set_modified
  OpenTox::Backend::FourStore.update "WITH <#{investigation_uri}>
  DELETE { <#{investigation_uri}> <#{RDF::DC.modified}> ?o} WHERE {<#{investigation_uri}> <#{RDF::DC.modified}> ?o};
  INSERT DATA { GRAPH <#{investigation_uri}> {<#{investigation_uri}> <#{RDF::DC.modified}> \"#{Time.new.strftime("%d %b %Y %H:%M:%S %Z")}\"}}"
  # save last modified to file in case of restore or transport backend
  modsave = "<#{investigation_uri}> <#{RDF::DC.modified}> \"#{Time.new.strftime("%d %b %Y %H:%M:%S %Z")}\" ." 
  File.open(File.join(dir, "modified.nt"), 'w') {|f| f.write(modsave) }
  newfiles = `cd #{File.dirname(__FILE__)}/investigation; git ls-files -z --others --exclude-standard --directory #{params[:id]}`
  request.env['REQUEST_METHOD'] == "POST" ? action = "created" : action = "modified"
  if newfiles != ""
    newfiles.split("\0").each{|newfile| `cd #{File.dirname(__FILE__)}/investigation && git add "#{newfile}"`}
    `cd #{File.dirname(__FILE__)}/investigation && git commit --allow-empty -am "#{newfiles.gsub("\0"," ::: ")}  #{action} by #{OpenTox::Authorization.get_user}"`
  else
    `cd #{File.dirname(__FILE__)}/investigation && git commit --allow-empty -am "#{params[:id]}/modified.nt  #{action} by #{OpenTox::Authorization.get_user}"` if `cd #{File.dirname(__FILE__)}/investigation && git status -s| cut -c 4-` != ""
  end
end

#tmp ⇒ String

Returns absolute investigation dir/tmp path

Returns:

  • (String)

    absolute investigation dir/tmp path


# File 'helper.rb', line 35

def tmp
  File.join dir,"tmp"
end

#uri_list ⇒ String

Returns uri-list of files in investigation folder

Returns:

  • (String)


# File 'helper.rb', line 14

def uri_list
  params[:id] ? d = "./investigation/#{params[:id]}/*" : d = "./investigation/*"
  uris = Dir[d].collect{|f| to(f.sub(/\.\//,'')) }
  uris.collect!{|u| u.sub(/(\/#{params[:id]}\/)/,'\1isatab/')} if params[:id]
  uris.collect!{|u| u.sub(/(\/isatab\/)/,'/files/')} if params[:id] && File.read(File.join(dir,nt)).match("hasInvType")
  uris.delete_if{|u| u.match(/_policies$/)}
  # ID.nt file is never an isatab file;
  # never use of ID.nt, deny view ?
  #uris.delete_if{|u| u.match(/tmp$|cache$|log$|modified\.nt$|isPublished\.nt$|isSummarySearchable\.nt$|ftpfiles\.nt$/)}
  uris.delete_if{|u| u.match(/tmp$|cache$|log$|\.nt$/)}
  uris.map!{ |u| u.gsub(" ", "%20") }
  uris.map!{ |u| File.symlink?("#{dir}/#{File.basename(u)}") ? u.gsub("/isatab/", "/files/") : u}
  uris.compact.sort.join("\n") + "\n"
end

#validate_params_uri(param, value) ⇒ Object

validates parameter URIs; each must be a ToxBank user service URI


# File 'helper_unformatted.rb', line 24

def validate_params_uri(param, value)
  keys = ["owningOrg", "owningPro", "authors", "keywords"]
  if keys.include?(param.to_s)
    (value.uri? && value =~ /toxbank/) ? (return true) : (return false)
  end
end
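
For example (placeholder URIs):

validate_params_uri(:authors, "https://services.example.org/toxbank/user/U1")  #=> true
validate_params_uri(:authors, "not a uri")                                      #=> false
validate_params_uri(:title, "anything")                                         #=> nil (key not checked)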