# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def managePendingInsert(self):
    """
    Check on the status of the currently running insert.

    Refreshes the node's persistent-request view, then walks the jobs on
    the node's global queue so they can be matched against this site's
    pending inserts.

    NOTE(review): the loop body appears truncated in this chunk -- only
    the job id is split into parts and nothing is done with them, and the
    ``needToInsertManifest``/``needToInsertIndex``/``queuedJobs`` locals
    are never consumed here; confirm against the original source.
    """
    # --------------------------------------------
    # check global queue, and update insert status
    self.log(INFO, "insert:%s: still updating" % self.name)
    self.log(INFO, "insert:%s: fetching progress reports from global queue..." %
             self.name)
    # ask the node to re-read its persistent (global-queue) requests
    self.node.refreshPersistentRequests()
    # snapshot which pieces of the site are still being inserted
    needToInsertManifest = self.insertingManifest
    needToInsertIndex = self.insertingIndex
    queuedJobs = {}
    # for each job on queue that we know, clear it
    globalJobs = self.node.getGlobalJobs()
    for job in globalJobs:
        # get file rec, if any (could be __manifest)
        # job ids look like "<...>|<...>" -- split into components
        parts = job.id.split("|")
def main():
    """
    When this script is executed, it runs the XML-RPC server.

    Builds the default option dict (host/port for the XML-RPC server and
    the FCP node) and parses the command line with ``getopt``; on a bad
    option it prints usage and exits with status 2.

    NOTE(review): the function appears truncated in this chunk --
    ``cmdopts``/``args`` are never consumed, ``output = None`` is dangling,
    and the server is never actually started; confirm against the
    original source.
    """
    import getopt
    # defaults; xmlrpcHost/xmlrpcPort, fcp and usage() are module-level
    # names defined outside this chunk
    opts = {'verbosity': fcp.INFO,
            'host':xmlrpcHost,
            'port':xmlrpcPort,
            'fcpHost':fcp.defaultFCPHost,
            'fcpPort':fcp.defaultFCPPort,
            }
    try:
        cmdopts, args = getopt.getopt(sys.argv[1:],
                                      "?hv:",
                                      ["help", "verbosity=", "host=", "port=",
                                       "fcphost=", "fcpport="])
    except getopt.GetoptError:
        # print help information and exit:
        usage()
        sys.exit(2)
    output = None
# NOTE(review): fragment without its enclosing ``def`` -- this reads as the
# body of an "update all registered sites" method (it uses ``self``).
# Indentation below is reconstructed; confirm against the original source.
conf = self.config
for sitename in conf.sections():
    # per-site settings stored in the config file
    uri = conf.get(sitename, "uri")
    dir = conf.get(sitename, "dir")            # NOTE: shadows builtin ``dir``
    hash = conf.get(sitename, "hash")          # NOTE: shadows builtin ``hash``
    version = conf.get(sitename, "version")
    privatekey = conf.get(sitename, "privatekey")
    # hash the directory contents so changes since last run can be detected
    files = fcp.readdir(dir, gethashes=True)
    h = sha.new()
    for f in files:
        h.update(f['hash'])
    hashNew = h.hexdigest()
    if hashNew != hash:
        # site content changed -- re-insert it
        log(INFO, "Updating site %s" % sitename)
        # NOTE(review): logging the private key may be a security concern
        log(INFO, "privatekey=%s" % privatekey)
        noSites = False
        try:
            res = self.node.put(privatekey,
                                dir=dir,
                                name=sitename,
                                version=version,
                                usk=True)
            log(INFO, "site %s updated successfully" % sitename)
        except:
            # best-effort: report the failure and carry on with other sites
            traceback.print_exc()
            log(ERROR, "site %s failed to update" % sitename)
        # record the new content hash; NOTE(review): this runs even when
        # the put failed, so a failed site won't retry -- confirm intended
        conf.set(sitename, "hash", hashNew)
        self.saveConfig()
# NOTE(review): fragment from a directory-scan routine -- ``knownrec``,
# ``rec`` and ``structureChanged`` are bound earlier, outside this chunk.
# Indentation below is reconstructed; confirm against the original source.
# Record the changed file's new hash/size and mark the site structure dirty.
knownrec['hash'] = rec['hash']
knownrec['sizebytes'] = rec['sizebytes']
knownrec['state'] = 'changed'
structureChanged = True
# for backwards compatibility: files which are missing
# the size get the physical size.
if 'sizebytes' not in knownrec:
    knownrec['sizebytes'] = rec['sizebytes']
# if structure has changed, gotta sort and save
if structureChanged:
    self.needToUpdate = True
    # Python 2 cmp-style sort by decoded file name.
    # NOTE(review): on Python 2, ``str.decode`` may not accept the
    # keyword argument ``errors=`` -- confirm this line actually runs.
    self.files.sort(lambda r1,r2: cmp(r1['name'].decode("utf-8", errors="ignore"), r2['name'].decode("utf-8", errors="ignore")))
    self.save()
    self.log(INFO, "scan: site %s has changed" % self.name)
else:
    self.log(INFO, "scan: site %s has not changed" % self.name)
# NOTE(review): duplicated fragment of the site-update loop above; it
# relies on names bound outside this chunk (privatekey, dir, sitename,
# version, hashNew, conf, noSites).  Indentation reconstructed.
try:
    # re-insert the site directory as a USK under its private key
    res = self.node.put(privatekey,
                        dir=dir,
                        name=sitename,
                        version=version,
                        usk=True)
    log(INFO, "site %s updated successfully" % sitename)
except:
    # best-effort: report the failure and continue
    traceback.print_exc()
    log(ERROR, "site %s failed to update" % sitename)
# persist the new content hash regardless of put outcome
conf.set(sitename, "hash", hashNew)
self.saveConfig()
if noSites:
    log(INFO, "No sites needed updating")
# NOTE(review): second duplicated fragment of the site-update loop; names
# (dir, hash, sitename, privatekey, version, conf, noSites) are bound
# outside this chunk.  Indentation reconstructed; confirm against source.
# Hash the directory contents to detect changes since the last run.
files = fcp.readdir(dir, gethashes=True)
h = sha.new()
for f in files:
    h.update(f['hash'])
hashNew = h.hexdigest()
if hashNew != hash:
    # site content changed -- re-insert it
    log(INFO, "Updating site %s" % sitename)
    # NOTE(review): logging the private key may be a security concern
    log(INFO, "privatekey=%s" % privatekey)
    noSites = False
    try:
        res = self.node.put(privatekey,
                            dir=dir,
                            name=sitename,
                            version=version,
                            usk=True)
        log(INFO, "site %s updated successfully" % sitename)
    except:
        # best-effort: report the failure and continue
        traceback.print_exc()
        log(ERROR, "site %s failed to update" % sitename)
    # persist the new content hash regardless of put outcome
    conf.set(sitename, "hash", hashNew)
    self.saveConfig()
if noSites:
    log(INFO, "No sites needed updating")