Joshua Dawes - CERN, CMS - The University of Manchester

Upload script wrapper - controls the automatic update system.

Note: the name of this file follows a different convention from the others because it should be the same as the current upload script name.

It takes the user's arguments and passes them to the main upload module CondDBFW.uploads, once the correct version of CondDBFW exists locally:

1. Ask the server corresponding to the database we're uploading to which version of CondDBFW it has (query the /conddbfw_version/ URL).
2. Decide which directory we can write to - either the current local directory, or /tmp/random_string/.
3. Pull the commit returned from the server into the directory from step 2.
4. Invoke the CondDBFW.uploads module with the arguments given to this script.

from __future__ import print_function

import os
import sys
import json
import shutil
import netrc
import getpass
import argparse
import subprocess
import traceback
import pycurl

from StringIO import StringIO
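The StringIO import (like the use of raw_input further down) is Python 2 specific. A minimal compatibility sketch - an assumption, not part of the original script - would fall back to io.BytesIO on Python 3, since pycurl writes raw bytes into the response buffer:

# Sketch only - the script as written targets Python 2.
try:
    from StringIO import StringIO          # Python 2
except ImportError:
    from io import BytesIO as StringIO     # Python 3: pycurl writes bytes to the buffer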
def get_version_info(url):
    """
    Queries the server-side for the commit hash it is currently using.
    Note: this is the commit hash used by /data/services/common/CondDBFW on the server-side.
    """
    request = pycurl.Curl()
    request.setopt(request.CONNECTTIMEOUT, 60)
    user_agent = "User-Agent: ConditionWebServices/1.0 python/%d.%d.%d PycURL/%s" % (sys.version_info[:3] + (pycurl.version_info()[1],))
    request.setopt(request.USERAGENT, user_agent)
    # we don't need to verify who signed the certificate or who it was issued to
    request.setopt(request.SSL_VERIFYPEER, 0)
    request.setopt(request.SSL_VERIFYHOST, 0)
    response_buffer = StringIO()
    request.setopt(request.WRITEFUNCTION, response_buffer.write)
    request.setopt(request.URL, url + "conddbfw_version/")
    # execute the request, then decode the JSON body it wrote into the buffer
    request.perform()
    return json.loads(response_buffer.getvalue())
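For reference, the /conddbfw_version/ endpoint is expected to return a JSON object carrying at least the "hash" and "repo" keys, since those are the fields read from it later in this script. A hedged usage sketch:

# Illustrative only - the response fields are assumed from how the result is used
# below (conddbfw_version["hash"] and conddbfw_version["repo"]).
version_info = get_version_info("https://cms-conddb.cern.ch/cmsDbCondUpload/")
print(version_info["hash"])   # 40-character commit hash used server-side
print(version_info["repo"])   # git repository URL to pull that commit from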
def get_local_commit_hash():
    """
    Gets the commit hash used by the local repository CondDBFW/.git/.
    """
    directory = os.path.abspath("CondDBFW")
    # read the commit hash written by pull_code_from_git the last time the code was updated
    commit_hash_file_handle = open(os.path.join(directory, ".commit_hash"), "r")
    commit_hash = commit_hash_file_handle.read().strip()
    commit_hash_file_handle.close()
    # validate the hash - a full git commit hash is 40 characters long
    if len(commit_hash) != 40:
        print("Commit hash found is not valid. Must be 40 characters long.")
        return None
    return commit_hash
def get_directory_to_pull_to(default_directory, commit_hash):
    """
    Finds out which directory we can safely use - either CondDBFW/ or a temporary directory.
    """
    try:
        # check that we can write to the default directory by creating (and removing) a test file
        handle = open(os.path.join(default_directory, "test_file"), "w")
        handle.close()
        os.remove(os.path.join(default_directory, "test_file"))
        sys.path.insert(0, default_directory)
        return default_directory
    except IOError:
        # we can't write to the default directory, so use a directory named after the commit hash
        new_path = os.path.join("tmp", commit_hash[0:10])
        if not(os.path.exists(new_path)):
            os.mkdir(new_path)
            sys.path.insert(0, new_path)
            return new_path
        else:
            exit("Can't find anywhere to pull the new code base to.")
horizontal_rule = "=" * 60
def pull_code_from_git(target_directory, repository_url, hash):
    """
    Pulls CondDBFW from the git repository specified by the upload server.
    """
    target = os.path.abspath(target_directory)
    sys.path.append(target)
    conddbfw_directory = os.path.join(target, "CondDBFW")
    git_directory = os.path.join(conddbfw_directory, ".git")
    if not(os.path.exists(conddbfw_directory)):
        os.mkdir(conddbfw_directory)
    else:
        # the CondDBFW directory is already there - ask whether we should overwrite it
        force_pull = str(raw_input("CondDBFW directory isn't empty - empty it, and update to new version? [y/n] "))
        if force_pull == "y":
            # empty the directory and recreate it
            shutil.rmtree(conddbfw_directory)
            os.mkdir(conddbfw_directory)

    print("Pulling code back from repository...")
    print(horizontal_rule)

    run_in_shell("git --git-dir=%s clone %s CondDBFW" % (git_directory, repository_url), shell=True)
    # --force discards any local changes so the checkout always matches the requested commit
    run_in_shell("cd %s && git checkout --force -b version_used %s" % (conddbfw_directory, hash), shell=True)

    # write the commit hash to a file so the next run can tell which version is checked out
    hash_file_handle = open(os.path.join(conddbfw_directory, ".commit_hash"), "w")
    hash_file_handle.write(hash)
    hash_file_handle.close()

    # delete the .git directory - the repository history isn't needed locally
    shutil.rmtree(git_directory)

    print(horizontal_rule)
    print("Creating local log directories (if required)...")
    if not(os.path.exists(os.path.join(target, "upload_logs"))):
        os.mkdir(os.path.join(target, "upload_logs"))
    if not(os.path.exists(os.path.join(target, "server_side_logs"))):
        os.mkdir(os.path.join(target, "server_side_logs"))
    print("Finished with log directories.")
    print("Update of CondDBFW complete.")
    print(horizontal_rule)
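A usage sketch combining this with get_version_info, mirroring how the __main__ block further below calls it:

# Sketch: pull the exact commit the server reports, into the current directory.
version_info = get_version_info("https://cms-conddb.cern.ch/cmsDbCondUpload/")
pull_code_from_git(os.getcwd(), version_info["repo"], version_info["hash"])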
def run_in_shell(*popenargs, **kwargs):
    """
    Runs string-based commands in the shell and returns the result.
    """
    out = subprocess.PIPE if kwargs.get("stdout") == None else kwargs.get("stdout")
    new_kwargs = kwargs
    if new_kwargs.get("stdout"):
        # stdout is passed explicitly to Popen below, so remove it from the keyword arguments
        del new_kwargs["stdout"]
    process = subprocess.Popen(*popenargs, stdout=out, **new_kwargs)
    stdout = process.communicate()[0]
    returnCode = process.returncode
    cmd = kwargs.get('args')
    if cmd is None:
        cmd = popenargs[0]
    if returnCode:
        raise subprocess.CalledProcessError(returnCode, cmd)
    return stdout
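Since run_in_shell raises subprocess.CalledProcessError on a non-zero exit code, callers can trap failures explicitly. A short sketch:

# Sketch: capture the output of a shell command and handle failure.
try:
    listing = run_in_shell("ls CondDBFW", shell=True)
except subprocess.CalledProcessError as error:
    print("Command failed with exit code %d" % error.returncode)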
def run_upload(**parameters):
    """
    Imports CondDBFW.uploads and runs the upload with the upload metadata obtained.
    """
    try:
        import CondDBFW.uploads as uploads
    except Exception as e:
        traceback.print_exc()
        exit("CondDBFW or one of its dependencies could not be imported.\n"
             + "If the CondDBFW directory exists, you are likely not in a CMSSW environment.")
    # construct the uploader with the metadata obtained, and run the upload
    uploader = uploads.uploader(**parameters)
    result = uploader.upload()
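The keyword arguments forwarded here are the entries of the metadata dictionary built in parse_arguments below. An illustrative (not exhaustive) call, with purely example values:

# Illustrative values only - real uploads take these from the command line or a metadata file.
run_upload(destinationDatabase="oracle://cms_orcoff_prep/CMS_CONDITIONS",
           destinationTags={"ExampleTag_v1": {}},
           since=1,
           userText="example upload",
           server="https://cms-conddb-dev.cern.ch/cmsDbCondUpload/")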
def parse_arguments():
    parser = argparse.ArgumentParser(prog="cmsDbUpload client", description="CMS Conditions Upload Script in CondDBFW.")

    parser.add_argument("--sourceDB", type=str, help="DB to find Tags, IOVs + Payloads in.", required=False)

    # metadata arguments
    parser.add_argument("--inputTag", type=str,
                        help="Tag to take IOVs + Payloads from in --sourceDB.", required=False)
    parser.add_argument("--destinationTag", type=str,
                        help="Tag to copy IOVs + Payloads to in --destDB.", required=False)
    parser.add_argument("--destinationDatabase", type=str,
                        help="Database to copy IOVs + Payloads to.", required=False)
    parser.add_argument("--since", type=int,
                        help="Since to take IOVs from.", required=False)
    parser.add_argument("--userText", type=str,
                        help="Description of --destTag (can be empty).")

    # metadata file
    parser.add_argument("--metadataFile", "-m", type=str, help="Metadata file to take metadata from.", required=False)

    parser.add_argument("--debug", required=False, action="store_true")
    parser.add_argument("--verbose", required=False, action="store_true")
    parser.add_argument("--testing", required=False, action="store_true")
    parser.add_argument("--fcsr-filter", type=str, help="Synchronization to take FCSR from for local filtering of IOVs.", required=False)

    parser.add_argument("--netrc", required=False)

    parser.add_argument("--hashToUse", required=False)

    parser.add_argument("--server", required=False)

    parser.add_argument("--review-options", required=False, action="store_true")

    command_line_data = parser.parse_args()
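    # Note (sketch): every option above is declared with required=False, so a minimal
    # invocation such as "--metadataFile metadata.json --server prep" is accepted;
    # attributes not given on the command line are left as None and are filled in
    # from the metadata file or from interactive prompts further below.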
    # map server aliases to full URLs, so users can give "prep" or "prod" instead of a URL
    server_alias_to_url = {
        "prep" : "https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
        "prod" : "https://cms-conddb.cern.ch/cmsDbCondUpload/",
        None : "https://cms-conddb.cern.ch/cmsDbCondUpload/"
    }

    # if the server given is an alias, replace it with the corresponding URL
    if command_line_data.server in server_alias_to_url.keys():
        command_line_data.server = server_alias_to_url[command_line_data.server]
    # get the username and password, either from netrc or from manual input
    try:
        netrc_file = command_line_data.netrc
        netrc_authenticators = netrc.netrc(netrc_file).authenticators("ConditionUploader")
        if netrc_authenticators == None:
            print("Your netrc file must contain the key 'ConditionUploader'.")
            manual_input = raw_input("Do you want to try to type your credentials? ")
            if manual_input == "y":
                # ask the user for their credentials interactively
                username = raw_input("Username: ")
                password = getpass.getpass("Password: ")
            else:
                exit()
        else:
            print("Read your credentials from ~/.netrc. If you want to use a different file, supply its name with the --netrc argument.")
            username = netrc_authenticators[0]
            password = netrc_authenticators[2]
    except:
        print("Couldn't obtain your credentials (either from netrc or manual input).")
        exit()

    command_line_data.username = username
    command_line_data.password = password
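    # Note (sketch): netrc.netrc(netrc_file).authenticators("ConditionUploader") looks up a
    # "machine ConditionUploader" entry in the netrc file and returns a (login, account, password)
    # tuple - which is why indices [0] and [2] are read above.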
    command_line_data.destinationTags = {command_line_data.destinationTag:{}}
    """
    Construct metadata_dictionary:
    Currently, there are 3 cases:

    1) An IOV is being appended to an existing Tag with an existing Payload.
       In this case, we just take all data from the command line.

    2) No metadata file is given, so we assume that ALL upload metadata is coming from the command line.

    3) A metadata file is given, so we parse the file, and then iterate through the command line arguments,
       since these override the options set in the metadata file.
    """
    if command_line_data.hashToUse != None:
        command_line_data.userText = ""
        metadata_dictionary = command_line_data.__dict__
    elif command_line_data.metadataFile == None:
        command_line_data.userText = command_line_data.userText \
                                        if command_line_data.userText != None \
                                        else str(raw_input("Tag's description [can be empty]:"))
        metadata_dictionary = command_line_data.__dict__
    else:
        # a metadata file was given - parse it, then let command line options override its values
        metadata_dictionary = json.loads("".join(open(os.path.abspath(command_line_data.metadataFile), "r").readlines()))
        metadata_dictionary["username"] = username
        metadata_dictionary["password"] = password
        metadata_dictionary["userText"] = metadata_dictionary.get("userText") \
                                            if metadata_dictionary.get("userText") != None \
                                            else str(raw_input("Tag's description [can be empty]:"))
        # set the server to use to be the default one
        metadata_dictionary["server"] = server_alias_to_url[None]
        # go through the command line arguments - these override the options set in the metadata file
        for (option_name, option_value) in command_line_data.__dict__.items():
            if option_name != "destinationTags":
                if option_value != None or (option_value == None and not(option_name in metadata_dictionary.keys())):
                    # either the option was given on the command line (so it overrides the file),
                    # or it wasn't given and the metadata file doesn't define it either
                    metadata_dictionary[option_name] = option_value
            else:
                if option_value != {None:{}}:
                    # destinationTags was built from the command line, so use it
                    metadata_dictionary["destinationTags"] = option_value
                elif option_value == {None:{}} and not("destinationTags" in metadata_dictionary.keys()):
                    metadata_dictionary["destinationTags"] = {None:{}}
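    # Illustrative metadata file content (an assumption based on the keys handled above,
    # not a verbatim example from the original sources):
    #
    #     {
    #         "destinationDatabase" : "oracle://cms_orcoff_prep/CMS_CONDITIONS",
    #         "destinationTags" : {"ExampleTag_v1" : {}},
    #         "inputTag" : "ExampleTag_v1",
    #         "since" : 1,
    #         "userText" : "example upload"
    #     }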
    if command_line_data.review_options:
        defaults = {
            "since" : "Since of first IOV",
            "userText" : "Populated by upload process",
            "netrc" : "None given",
            "fcsr_filter" : "Don't apply",
            "hashToUse" : "Using local SQLite file instead"
        }
        print("Configuration to use for the upload:")
        for key in metadata_dictionary:
            if key not in ["username", "password", "destinationTag"]:
                value_to_print = metadata_dictionary[key] if metadata_dictionary[key] != None else defaults.get(key)
                print("\t%s : %s" % (key, value_to_print))

        if raw_input("\nDo you want to continue? [y/n] ") != "y":
            exit()

    return metadata_dictionary
if __name__ == "__main__":

    upload_metadata = parse_arguments()

    # upload_metadata decides which service URL to use
    final_service_url = upload_metadata["server"]

    conddbfw_version = get_version_info(final_service_url)
    local_version = get_local_commit_hash()

    # Todo - case where we don't have write permission in the current directory
    # (local_version == None and hashes don't match)
    target_directory = ""

    if local_version != None:
        if conddbfw_version["hash"] == local_version:
            # no update is needed - use the code that is already here
            print("No change of version of CondDBFW is required - performing the upload.")
            sys.path.append(os.path.abspath(os.getcwd()))
        elif conddbfw_version["hash"] != local_version:
            # the server uses a different commit - pull it over the local checkout
            print("The server uses a different version of CondDBFW - changing to commit '%s' of CondDBFW." % conddbfw_version["hash"])
            shell_response = pull_code_from_git(os.getcwd(), conddbfw_version["repo"], conddbfw_version["hash"])
    else:
        # no local version was found, so pull one into a directory we can write to
        print("No CondDBFW version found locally - pulling one.")
        target_directory = get_directory_to_pull_to(os.getcwd(), conddbfw_version["hash"])
        shell_response = pull_code_from_git(target_directory, conddbfw_version["repo"], conddbfw_version["hash"])

    import CondDBFW.data_sources as data_sources

    upload_metadata["sqlite_file"] = upload_metadata.get("sourceDB")

    # copy the metadata into a dictionary that can be wrapped as the upload's metadata source
    upload_metadata_argument = {}
    for (key, value) in upload_metadata.items():
        if key != "metadata_source":
            upload_metadata_argument[key] = value
    upload_metadata["metadata_source"] = data_sources.json_data_node.make(upload_metadata_argument)

    # pass the dictionary as keyword arguments to run_upload
    run_upload(**upload_metadata)

    # if the code was pulled into a temporary directory, clean it up again
    if "tmp" in target_directory:
        print(horizontal_rule)
        print("Removing directory %s..." % target_directory)
        try:
            run_in_shell("rm -rf %s" % target_directory, shell=True)
        except Exception as e:
            print("Couldn't delete the directory %s - try to manually delete it." % target_directory)
Functions defined in this script:

def get_directory_to_pull_to(default_directory, commit_hash)
def get_version_info(url)
def run_in_shell(*popenargs, **kwargs)
def run_upload(**parameters)
def get_local_commit_hash()
def pull_code_from_git(target_directory, repository_url, hash)