4 Joshua Dawes - CERN, CMS - The University of Manchester 6 Upload script wrapper - controls the automatic update system. 8 Note: the name of the file follows a different convention to the others because it should be the same as the current upload script name. 10 Takes user arguments and passes them to the main upload module CondDBFW.uploads, once the correct version exists. 12 1. Ask the server corresponding to the database we're uploading to which version of CondDBFW it has (query the /conddbfw_version/ url). 13 2. Decide which directory that we can write to - either the current local directory, or /tmp/random_string/. 14 3. Pull the commit returned from the server into the directory from step 2. 15 4. Invoke the CondDBFW.uploads module with the arguments given to this script. 20 from StringIO
import StringIO
33 Queries the server-side for the commit hash it is currently using. 34 Note: this is the commit hash used by /data/services/common/CondDBFW on the server-side. 36 request = pycurl.Curl()
37 request.setopt(request.CONNECTTIMEOUT, 60)
38 user_agent =
"User-Agent: ConditionWebServices/1.0 python/%d.%d.%d PycURL/%s" % (sys.version_info[ :3 ] + (pycurl.version_info()[1],))
39 request.setopt(request.USERAGENT, user_agent)
41 request.setopt(request.SSL_VERIFYPEER, 0)
42 request.setopt(request.SSL_VERIFYHOST, 0)
43 response_buffer = StringIO()
44 request.setopt(request.WRITEFUNCTION, response_buffer.write)
45 request.setopt(request.URL, url +
"conddbfw_version/")
47 return json.loads(response_buffer.getvalue())
51 Gets the commit hash used by the local repository CondDBFW/.git/. 53 directory = os.path.abspath(
"CondDBFW")
58 commit_hash_file_handle = open(os.path.join(directory,
".commit_hash"),
"r") 59 commit_hash = commit_hash_file_handle.read().strip() 62 if len(commit_hash) != 40:
63 print(
"Commit hash found is not valid. Must be 40 characters long.")
74 Finds out which directory we can safely use - either CondDBFW/ or a temporary directory. 78 handle = open(os.path.join(default_directory,
"test_file"),
"w")
81 os.remove(os.path.join(default_directory,
"test_file"))
82 sys.path.insert(0, default_directory)
83 return default_directory
86 new_path = os.path.join(
"tmp", commit_hash[0:10])
87 if not(os.path.exists(new_path)):
89 sys.path.insert(0, new_path)
93 exit(
"Can't find anywhere to pull the new code base to.")
95 horizontal_rule =
"="*60
99 Pulls CondDBFW from the git repository specified by the upload server. 102 target = os.path.abspath(target_directory)
103 sys.path.append(target)
104 conddbfw_directory = os.path.join(target,
"CondDBFW")
105 git_directory = os.path.join(conddbfw_directory,
".git")
106 if not(os.path.exists(conddbfw_directory)):
107 os.mkdir(conddbfw_directory)
110 force_pull =
str(raw_input(
"CondDBFW directory isn't empty - empty it, and update to new version? [y/n] "))
111 if force_pull ==
"y":
115 os.mkdir(conddbfw_directory)
117 print(
"Pulling code back from repository...")
118 print(horizontal_rule)
120 run_in_shell(
"git --git-dir=%s clone %s CondDBFW" % (git_directory, repository_url), shell=
True)
123 run_in_shell(
"cd %s && git checkout --force -b version_used %s" % (conddbfw_directory, hash), shell=
True)
126 hash_file_handle = open(os.path.join(conddbfw_directory,
".commit_hash"),
"w")
127 hash_file_handle.write(hash)
128 hash_file_handle.close()
131 shutil.rmtree(git_directory)
133 print(horizontal_rule)
134 print(
"Creating local log directories (if required)...")
135 if not(os.path.exists(os.path.join(target,
"upload_logs"))):
136 os.mkdir(os.path.join(target,
"upload_logs"))
137 if not(os.path.exists(os.path.join(target,
"server_side_logs"))):
138 os.mkdir(os.path.join(target,
"server_side_logs"))
139 print(
"Finished with log directories.")
140 print(
"Update of CondDBFW complete.")
142 print(horizontal_rule)
148 Runs string-based commands in the shell and returns the result. 150 out = subprocess.PIPE
if kwargs.get(
"stdout") ==
None else kwargs.get(
"stdout")
152 if new_kwargs.get(
"stdout"):
153 del new_kwargs[
"stdout"]
154 process = subprocess.Popen(*popenargs, stdout=out, **new_kwargs)
155 stdout = process.communicate()[0]
156 returnCode = process.returncode
157 cmd = kwargs.get(
'args')
161 raise subprocess.CalledProcessError(returnCode, cmd)
166 Imports CondDBFW.uploads and runs the upload with the upload metadata obtained. 169 import CondDBFW.uploads
as uploads
170 except Exception
as e:
171 traceback.print_exc()
172 exit(
"CondDBFW or one of its dependencies could not be imported.\n"\
173 +
"If the CondDBFW directory exists, you are likely not in a CMSSW environment.")
176 result = uploader.upload()
180 parser = argparse.ArgumentParser(prog=
"cmsDbUpload client", description=
"CMS Conditions Upload Script in CondDBFW.")
182 parser.add_argument(
"--sourceDB", type=str, help=
"DB to find Tags, IOVs + Payloads in.", required=
False)
185 parser.add_argument(
"--inputTag", type=str,\
186 help=
"Tag to take IOVs + Payloads from in --sourceDB.", required=
False)
187 parser.add_argument(
"--destinationTag", type=str,\
188 help=
"Tag to copy IOVs + Payloads to in --destDB.", required=
False)
189 parser.add_argument(
"--destinationDatabase", type=str,\
190 help=
"Database to copy IOVs + Payloads to.", required=
False)
191 parser.add_argument(
"--since", type=int,\
192 help=
"Since to take IOVs from.", required=
False)
193 parser.add_argument(
"--userText", type=str,\
194 help=
"Description of --destTag (can be empty).")
197 parser.add_argument(
"--metadataFile",
"-m", type=str, help=
"Metadata file to take metadata from.", required=
False)
199 parser.add_argument(
"--debug", required=
False, action=
"store_true")
200 parser.add_argument(
"--verbose", required=
False, action=
"store_true")
201 parser.add_argument(
"--testing", required=
False, action=
"store_true")
202 parser.add_argument(
"--fcsr-filter", type=str, help=
"Synchronization to take FCSR from for local filtering of IOVs.", required=
False)
204 parser.add_argument(
"--netrc", required=
False)
206 parser.add_argument(
"--hashToUse", required=
False)
208 parser.add_argument(
"--server", required=
False)
210 parser.add_argument(
"--review-options", required=
False, action=
"store_true")
212 command_line_data = parser.parse_args()
215 server_alias_to_url = {
216 "prep" :
"https://cms-conddb-dev.cern.ch/cmsDbCondUpload/",
217 "prod" :
"https://cms-conddb.cern.ch/cmsDbCondUpload/",
218 None :
"https://cms-conddb.cern.ch/cmsDbCondUpload/" 223 if command_line_data.server
in server_alias_to_url.keys():
224 command_line_data.server = server_alias_to_url[command_line_data.server]
228 netrc_file = command_line_data.netrc
229 netrc_authenticators = netrc.netrc(netrc_file).authenticators(
"ConditionUploader")
230 if netrc_authenticators ==
None:
231 print(
"Your netrc file must contain the key 'ConditionUploader'.")
232 manual_input = raw_input(
"Do you want to try to type your credentials? ")
233 if manual_input ==
"y":
235 username = raw_input(
"Username: ")
236 password = getpass.getpass(
"Password: ")
240 print(
"Read your credentials from ~/.netrc. If you want to use a different file, supply its name with the --netrc argument.")
241 username = netrc_authenticators[0]
242 password = netrc_authenticators[2]
244 print(
"Couldn't obtain your credentials (either from netrc or manual input).")
247 command_line_data.username = username
248 command_line_data.password = password
251 command_line_data.destinationTags = {command_line_data.destinationTag:{}}
254 Construct metadata_dictionary: 255 Currently, this is 3 cases: 257 1) An IOV is being appended to an existing Tag with an existing Payload. 258 In this case, we just take all data from the command line. 260 2) No metadata file is given, so we assume that ALL upload metadata is coming from the command line. 262 3) A metadata file is given, hence we parse the file, and then iterate through command line arguments 263 since these override the options set in the metadata file. 266 if command_line_data.hashToUse !=
None:
267 command_line_data.userText =
"" 268 metadata_dictionary = command_line_data.__dict__
269 elif command_line_data.metadataFile ==
None:
270 command_line_data.userText = command_line_data.userText\
271 if command_line_data.userText !=
None\
272 else str(raw_input(
"Tag's description [can be empty]:"))
273 metadata_dictionary = command_line_data.__dict__
275 metadata_dictionary = json.loads(
"".
join(open(os.path.abspath(command_line_data.metadataFile),
"r").readlines())) 276 metadata_dictionary["username"] = username
277 metadata_dictionary[
"password"] = password
278 metadata_dictionary[
"userText"] = metadata_dictionary.get(
"userText")\
279 if metadata_dictionary.get(
"userText") !=
None\
280 else str(raw_input(
"Tag's description [can be empty]:"))
282 metadata_dictionary[
"server"] = server_alias_to_url[
None]
285 for (option_name, option_value)
in command_line_data.__dict__.items():
287 if option_name !=
"destinationTags":
288 if option_value !=
None or (option_value ==
None and not(option_name
in metadata_dictionary.keys())):
292 metadata_dictionary[option_name] = option_value
294 if option_value != {
None:{}}:
295 metadata_dictionary[
"destinationTags"] = {option_value:{}}
296 elif option_value == {
None:{}}
and not(
"destinationTags" in metadata_dictionary.keys()):
297 metadata_dictionary[
"destinationTags"] = {
None:{}}
299 if command_line_data.review_options:
301 "since" :
"Since of first IOV",
302 "userText" :
"Populated by upload process",
303 "netrc" :
"None given",
304 "fcsr_filter" :
"Don't apply",
305 "hashToUse" :
"Using local SQLite file instead" 307 print(
"Configuration to use for the upload:")
308 for key
in metadata_dictionary:
309 if not(key)
in [
"username",
"password",
"destinationTag"]:
310 value_to_print = metadata_dictionary[key]
if metadata_dictionary[key] !=
None else defaults[key]
311 print(
"\t%s : %s" % (key, value_to_print))
313 if raw_input(
"\nDo you want to continue? [y/n] ") !=
"y":
316 return metadata_dictionary
318 if __name__ ==
"__main__":
323 final_service_url = upload_metadata[
"server"]
329 Todo - case where we don't have write permission in the current directory (local_version == None and hashes don't match) 333 target_directory =
"" 335 if local_version !=
None:
336 if conddbfw_version[
"hash"] == local_version:
338 print(
"No change of version of CondDBFW is required - performing the upload.")
340 sys.path.append(os.path.abspath(os.getcwd()))
341 elif conddbfw_version[
"hash"] != local_version:
344 print(
"The server uses a different version of CondDBFW - changing to commit '%s' of CondDBFW." % conddbfw_version[
"hash"])
345 shell_response =
pull_code_from_git(os.getcwd(), conddbfw_version[
"repo"], conddbfw_version[
"hash"])
349 print(
"No CondDBFW version found locally - pulling one.")
351 shell_response =
pull_code_from_git(target_directory, conddbfw_version[
"repo"], conddbfw_version[
"hash"])
353 import CondDBFW.data_sources
as data_sources
355 upload_metadata[
"sqlite_file"] = upload_metadata.get(
"sourceDB")
358 upload_metadata_argument = {}
359 for (key, value)
in upload_metadata.items():
360 if key !=
"metadata_source":
361 upload_metadata_argument[key] = value
369 if "tmp" in target_directory:
370 print(horizontal_rule)
371 print(
"Removing directory %s..." % target_directory)
373 run_in_shell(
"rm -rf %s" % target_directory, shell=
True)
374 except Exception
as e:
375 print(
"Couldn't delete the directory %s - try to manually delete it." % target_directory)
S & print(S &os, JobReport::InputFile const &f)
def get_directory_to_pull_to(default_directory, commit_hash)
def get_version_info(url)
def run_in_shell(popenargs, kwargs)
def run_upload(parameters)
static std::string join(char **cmd)
def get_local_commit_hash()
def pull_code_from_git(target_directory, repository_url, hash)