From ced6b07f6134776f291d166f6ff4c2e6cad0091e Mon Sep 17 00:00:00 2001
From: belforte
Date: Thu, 18 Nov 2021 18:52:34 +0100
Subject: [PATCH] remove code which referenced the old CrabCache. Fix #5113
 and fix #5114

---
 src/python/CRABClient/ClientUtilities.py     | 18 ++------
 .../CRABClient/Commands/preparelocal.py      |  7 +--
 src/python/CRABClient/Commands/submit.py     | 10 +---
 src/python/CRABClient/JobType/UserTarball.py | 46 ++++---------
 4 files changed, 16 insertions(+), 65 deletions(-)

diff --git a/src/python/CRABClient/ClientUtilities.py b/src/python/CRABClient/ClientUtilities.py
index 2f00c885..64704eeb 100644
--- a/src/python/CRABClient/ClientUtilities.py
+++ b/src/python/CRABClient/ClientUtilities.py
@@ -234,20 +234,10 @@ def uploadlogfile(logger, proxyfilename, taskname=None, logfilename=None, logpat
 
     cacheurl = server_info(crabserver=crabserver, subresource='backendurls')['cacheSSL']
     logger.info("Uploading log file...")
-    if 'S3' in cacheurl.upper():
-        objecttype = 'clientlog'
-        uploadToS3(crabserver=crabserver, filepath=logpath, objecttype=objecttype, taskname=taskname, logger=logger)
-        logfileurl = getDownloadUrlFromS3(crabserver=crabserver, objecttype=objecttype, taskname=taskname, logger=logger)
-    else:
-        cacheurldict = {'endpoint': cacheurl, "pycurl": True}
-        ufc = UserFileCache(cacheurldict)
-        logger.debug("cacheURL: %s\nLog file name: %s" % (cacheurl, logfilename))
-        ufc.uploadLog(logpath, logfilename)
-        logfileurl = cacheurl + '/logfile?name='+str(logfilename)
-        if not username:
-            from CRABClient.UserUtilities import getUsername
-            username = getUsername(proxyFile=proxyfilename, logger=logger)
-        logfileurl += '&username='+str(username)
+    objecttype = 'clientlog'
+    uploadToS3(crabserver=crabserver, filepath=logpath, objecttype=objecttype, taskname=taskname, logger=logger)
+    logfileurl = getDownloadUrlFromS3(crabserver=crabserver, objecttype=objecttype, taskname=taskname, logger=logger)
+    logger.info("Log file URL: %s" % (logfileurl))
     logger.info("%sSuccess%s: Log file uploaded successfully."
                 % (colors.GREEN, colors.NORMAL))
 
diff --git a/src/python/CRABClient/Commands/preparelocal.py b/src/python/CRABClient/Commands/preparelocal.py
index 29b10dcc..5bac4839 100644
--- a/src/python/CRABClient/Commands/preparelocal.py
+++ b/src/python/CRABClient/Commands/preparelocal.py
@@ -68,12 +68,7 @@ def getInputFiles(self):
         inputsFilename = os.path.join(os.getcwd(), 'InputFiles.tar.gz')
 
         if status == 'UPLOADED':
-            filecacheurl = getColumn(crabDBInfo, 'tm_cache_url')
-            ufc = CRABClient.Emulator.getEmulator('ufc')({'endpoint' : filecacheurl, "pycurl": True})
-            self.logger.debug("Downloading and extracting 'dry-run-sandbox.tar.gz' from %s" % filecacheurl)
-            ufc.downloadLog('dry-run-sandbox.tar.gz', output=os.path.join(os.getcwd(), 'dry-run-sandbox.tar.gz'))
-            with tarfile.open('dry-run-sandbox.tar.gz') as tf:
-                tf.extractall()
+            raise ClientException('Currently crab preparelocal only works for tasks successfully submitted')
         elif status == 'SUBMITTED':
             webdir = getProxiedWebDir(crabserver=self.crabserver, task=taskname, logFunction=self.logger.debug)
diff --git a/src/python/CRABClient/Commands/submit.py b/src/python/CRABClient/Commands/submit.py
index cba8dace..2314ed5d 100644
--- a/src/python/CRABClient/Commands/submit.py
+++ b/src/python/CRABClient/Commands/submit.py
@@ -15,8 +15,6 @@
 if sys.version_info < (3, 0):
     from urllib import urlencode, quote
-
-import CRABClient.Emulator
 from CRABClient.ClientUtilities import DBSURLS
 from CRABClient.Commands.SubCommand import SubCommand
 from CRABClient.ClientMapping import parametersMapping, getParamDefaultValue
@@ -379,12 +377,8 @@ def executeTestRun(self, filecacheurl, uniquerequestname):
         tmpDir = tempfile.mkdtemp()
         self.logger.info('Created temporary directory for dry run sandbox in %s' % tmpDir)
         os.chdir(tmpDir)
-        if 'S3' in filecacheurl.upper():
-            downloadFromS3(crabserver=self.crabserver, filepath=os.path.join(tmpDir, 'dry-run-sandbox.tar.gz'),
-                           objecttype='runtimefiles', taskname=uniquerequestname, logger=self.logger)
-        else:
-            ufc = CRABClient.Emulator.getEmulator('ufc')({'endpoint' : filecacheurl, "pycurl": True})
-            ufc.downloadLog('dry-run-sandbox.tar.gz', output=os.path.join(tmpDir, 'dry-run-sandbox.tar.gz'))
+        downloadFromS3(crabserver=self.crabserver, filepath=os.path.join(tmpDir, 'dry-run-sandbox.tar.gz'),
+                       objecttype='runtimefiles', taskname=uniquerequestname, logger=self.logger)
         for name in ['dry-run-sandbox.tar.gz', 'InputFiles.tar.gz', 'CMSRunAnalysis.tar.gz', 'sandbox.tar.gz']:
             tf = tarfile.open(os.path.join(tmpDir, name))
             tf.extractall(tmpDir)
diff --git a/src/python/CRABClient/JobType/UserTarball.py b/src/python/CRABClient/JobType/UserTarball.py
index 1bc050b6..40e43105 100644
--- a/src/python/CRABClient/JobType/UserTarball.py
+++ b/src/python/CRABClient/JobType/UserTarball.py
@@ -18,7 +18,6 @@
 import hashlib
 import uuid
 
-import CRABClient.Emulator
 from CRABClient.ClientMapping import configParametersInfo
 from CRABClient.JobType.ScramEnvironment import ScramEnvironment
 from CRABClient.ClientUtilities import colors, BOOTSTRAP_CFGFILE, BOOTSTRAP_CFGFILE_PKL
@@ -279,42 +278,15 @@
               (archiveName, archiveSize, filecacheurl))
         self.logger.debug(msg)
 
-        if 'S3' in filecacheurl.upper():
-            # use S3
-            # generate a 32char hash like UserFileCache used to do
-            hashkey = calculateChecksum(archiveName, exclude=NEW_USER_SANDBOX_EXCLUSIONS)
-            # the ".tar.gz" suffix here is forced by other places in the client which add it when
-            # storing tarball name in task table. Not very elegant to need to hardcode in several places.
-            cachename = "%s.tar.gz" % hashkey
-            # current code requires a taskname to extract username. Any dummy one will do
-            # next version of RESTCache will get username from cmsweb FE headers
-            uploadToS3(crabserver=self.crabserver, objecttype='sandbox', filepath=archiveName,
-                       tarballname=cachename, logger=self.logger)
-        else:
-            # old way using UFC
-            ufc = CRABClient.Emulator.getEmulator('ufc')({'endpoint' : filecacheurl, "pycurl": True})
-            t1 = time.time()
-            result = ufc.upload(archiveName, excludeList=NEW_USER_SANDBOX_EXCLUSIONS)
-            ufcSeconds = int(time.time()-t1)
-            if 'hashkey' not in result:
-                self.logger.error("Failed to upload archive: %s" % str(result))
-                raise CachefileNotFoundException
-            hashkey = str(result['hashkey'])
-            # upload a copy to S3 dev as well, just to stress it a bit, this never raises
-            s3report = testS3upload(self.s3tester, archiveName, hashkey, self.logger)
-            # report also how long it took uploading to UFC (which surely worked if we are here)
-            s3report['ufcseconds'] = ufcSeconds
-            # upload S3 test report to crabcache
-            reportFile = '/tmp/crabs3report.' + uuid.uuid4().hex
-            with open(reportFile, 'w') as fp:
-                json.dump(s3report, fp)
-            reportName = 'S3-' + s3report['timestamp'] + ':s3report.json'
-            try:
-                ufc.uploadLog(reportFile, reportName)
-                self.logger.debug('Report of S3 upload stored on CrabCache as %s', reportName)
-            except Exception as e:
-                self.logger.debug(str(e))
-            os.remove(reportFile)
+        # generate a 32char hash like the old UserFileCache used to do
+        hashkey = calculateChecksum(archiveName, exclude=NEW_USER_SANDBOX_EXCLUSIONS)
+        # the ".tar.gz" suffix here is forced by other places in the client which add it
+        # when storing the tarball name in the task table. Not very elegant to need to
+        # hardcode it in several places.
+        cachename = "%s.tar.gz" % hashkey
+        # current code requires a taskname to extract the username. Any dummy one will do;
+        # the next version of RESTCache will get the username from cmsweb FE headers
+        uploadToS3(crabserver=self.crabserver, objecttype='sandbox', filepath=archiveName,
+                   tarballname=cachename, logger=self.logger)
 
         return hashkey
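
For reference, everything this patch touches now funnels into a single S3-only flow. The sketch below condenses it into one standalone driver. calculateChecksum, uploadToS3, getDownloadUrlFromS3 and NEW_USER_SANDBOX_EXCLUSIONS are the real helpers called in the hunks above; importing them from ServerUtilities and wrapping them in an uploadSandboxAndLog function are my assumptions, made only to show the call order and argument shapes in one place.

    import logging

    # assumed import location for the helpers used in the hunks above
    from ServerUtilities import (NEW_USER_SANDBOX_EXCLUSIONS, calculateChecksum,
                                 getDownloadUrlFromS3, uploadToS3)

    def uploadSandboxAndLog(crabserver, archiveName, logpath, taskname):
        """Hypothetical driver: upload a user sandbox and a client log via S3 only."""
        logger = logging.getLogger('CRABClient')
        # sandbox: the S3 object is named after a 32-char checksum of the tarball,
        # mimicking the old UserFileCache so the task table keeps "<hashkey>.tar.gz"
        hashkey = calculateChecksum(archiveName, exclude=NEW_USER_SANDBOX_EXCLUSIONS)
        uploadToS3(crabserver=crabserver, objecttype='sandbox', filepath=archiveName,
                   tarballname='%s.tar.gz' % hashkey, logger=logger)
        # client log: upload under the task name, then ask for a download URL
        uploadToS3(crabserver=crabserver, objecttype='clientlog', filepath=logpath,
                   taskname=taskname, logger=logger)
        logfileurl = getDownloadUrlFromS3(crabserver=crabserver, objecttype='clientlog',
                                          taskname=taskname, logger=logger)
        return hashkey, logfileurl

Naming the S3 object after the tarball checksum is what lets the client keep returning hashkey to its callers, unchanged from the UserFileCache days, rather than an S3 URL.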