Package Ganga :: Package Runtime :: Module http_server

Source Code for Module Ganga.Runtime.http_server

from BaseHTTPServer import BaseHTTPRequestHandler
from Ganga.Core.GangaRepository import getRegistry, RegistryKeyError
from Ganga.Core.GangaThread import GangaThread
from Ganga.Utility.util import hostname
from BaseHTTPServer import HTTPServer

import urlparse
import Ganga.GPI
import Ganga.Utility.logging  # imported explicitly so the getLogger() call below works
from Ganga.GPI import config, jobs
import time, datetime
import os

logger = Ganga.Utility.logging.getLogger()
job_status_color = {'new' : '00FFFF',
                    'submitting' : 'FFFFFF',
                    'submitted' : '0000FF',
                    'running' : '008000',
                    'completed' : '006400',
                    'completing' : '006400',
                    'killed' : 'FF0000',
                    'unknown' : '808080',
                    'incomplete' : 'FF00FF',
                    'failed' : 'FF0000'}


subjob_status_color = {'new' : '00ff7d',
                       'submitting' : 'FFFFFF',
                       'submitted' : '00007d',
                       'running' : '00f000',
                       'completed' : '009000',
                       'completing' : '009000',
                       'killed' : '7d0000',
                       'unknown' : '808080',
                       'incomplete' : '7d007d',
                       'failed' : '7D0000'}

def getColorString(statuses, jobs=True):

    colorString = ""
    colorDictionary = job_status_color

    if not jobs:
        colorDictionary = subjob_status_color

    for status in statuses[:-1]:
        colorString += '%s|' % colorDictionary[status]
    colorString += colorDictionary[statuses[-1]]

    return colorString
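
# Example (illustrative, not part of the original module): getColorString maps an
# ordered list of statuses onto a '|'-separated colour string for the chart API,
# e.g. with the colour tables above:
#
#   getColorString(['running', 'failed'])      == '008000|FF0000'
#   getColorString(['killed'], jobs=False)     == '7d0000'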

def addQuotes(value):

    trimmedValue = value.strip('\'')

    return '"' + trimmedValue + '"'

def get_users_JSON():

    json_users = "{\"basicData\": [[{\"GridName\": \"%s\"}]]}" % config.Configuration.user

    return json_users
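
# Example (illustrative, not part of the original module): for a user 'jdoe' set
# in the [Configuration]user option, get_users_JSON() returns
#
#   '{"basicData": [[{"GridName": "jdoe"}]]}'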

def get_subjob_JSON(job):

    result = []
    result.append("{")

    result.append("\"id\": %s," % addQuotes(job.fqid))
    result.append("\"status\": %s," % addQuotes(job.status))
    result.append("\"name\": %s," % addQuotes(job.name))
    result.append("\"application\": %s," % addQuotes(job.application.__class__.__name__))
    result.append("\"backend\": %s," % addQuotes(job.backend.__class__.__name__))
    result.append("\"actualCE\": %s" % addQuotes(job.backend.actualCE))

    result.append("}")

    return "".join(result)

def get_job_JSON(job):

    undefinedAttribute = 'UNDEFINED'

    result = []
    result.append("{")

    result.append("\"id\": %s," % addQuotes(job.fqid))
    result.append("\"status\": %s," % addQuotes(job.status))
    result.append("\"name\": %s," % addQuotes(job.name))

    #add mon links in the JSON
    mon_links = job.info.monitoring_links
    mon_links_html = ''

    if len(mon_links) > 0:
        number = 1
        for mon_link in mon_links:
            #if it is string -> just the path to the link
            if isinstance(mon_link, str):
                mon_links_html = mon_links_html + '<div>&nbsp;&nbsp;&nbsp;<a href=\'%s\'>mon_link_%s</a></div>' % (mon_link, number)
                number += 1
            elif isinstance(mon_link, tuple):
                if len(mon_link) == 2:
                    mon_links_html = mon_links_html + '<div>&nbsp;&nbsp;&nbsp;<a href=\'%s\'>%s</a></div>' % (mon_link[0], mon_link[1])
                else:
                    mon_links_html = mon_links_html + '<div>&nbsp;&nbsp;&nbsp;<a href=\'%s\'>mon_link_%s</a></div>' % (mon_link[0], number)
                    number += 1

    result.append("\"link\": %s," % addQuotes(mon_links_html))

    #test for expandable data
    result.append("\"inputdir\": %s," % addQuotes(job.inputdir))
    result.append("\"outputdir\": %s," % addQuotes(job.outputdir))

    try:
        result.append("\"submitted\": %s," % addQuotes(str(len(job.subjobs.select(status='submitted')))))
        result.append("\"running\": %s," % addQuotes(str(len(job.subjobs.select(status='running')))))
        result.append("\"completed\": %s," % addQuotes(str(len(job.subjobs.select(status='completed')))))
        result.append("\"failed\": %s," % addQuotes(str(len(job.subjobs.select(status='failed')))))

        result.append("\"application\": %s," % addQuotes(job.application.__class__.__name__))
        result.append("\"backend\": %s," % addQuotes(job.backend.__class__.__name__))
        result.append("\"subjobs\": %s," % addQuotes(str(len(job.subjobs))))
        result.append("\"uuid\": %s," % addQuotes(job.info.uuid))

        try:
            result.append("\"actualCE\": %s," % addQuotes(job.backend.actualCE))
        except AttributeError:
            result.append("\"actualCE\": %s," % addQuotes(undefinedAttribute))

    except RegistryKeyError:
        pass

    #remove the last , -> else invalid JSON
    if result[len(result) - 1][-1] == ',':
        last = result[-1]
        result = result[:-1]
        result.append(last[:-1])

    result.append("}")

    return "".join(result)

def get_subjobs_in_time_range(jobid, fromDate=None, toDate=None):

    subjobs = []

    for subjob in jobs(jobid).subjobs:

        timeCreated = subjob.time.timestamps['new']

        if fromDate is None and toDate is None:
            subjobs.append(subjob)

        elif fromDate is not None and toDate is not None:
            if timeCreated >= fromDate and timeCreated <= toDate:
                subjobs.append(subjob)

        elif fromDate is not None and toDate is None:
            if timeCreated >= fromDate:
                subjobs.append(subjob)

    return subjobs

def get_subjobs_JSON(jobid, fromDate=None, toDate=None):

    json_subjobs_strings = []
    json_subjobs_strings.append("{\"taskjobs\": [")

    subjobs_in_time_range = get_subjobs_in_time_range(jobid, fromDate, toDate)

    for subjob in subjobs_in_time_range:

        json_subjobs_strings.append(get_subjob_JSON(subjob))
        json_subjobs_strings.append(",")

    if json_subjobs_strings[-1] == ",":
        json_subjobs_strings = json_subjobs_strings[:-1]

    json_subjobs_strings.append("]}")

    return "".join(json_subjobs_strings)
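
# Illustrative note (not part of the original module): get_subjobs_JSON wraps the
# individual get_subjob_JSON objects in the envelope used by the web GUI, e.g.
#
#   '{"taskjobs": [{...}, {...}]}'
#
# with one object per subjob created in the selected time range.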

def get_job_infos_in_time_range(fromDate=None, toDate=None):

    job_infos = []

    for jobInfo in jobs_dictionary.values():

        timeCreated = jobInfo.getTimeCreated()

        if timeCreated is None:
            if fromDate is None and toDate is None:
                job_infos.append(jobInfo)

        elif fromDate is None and toDate is None:
            job_infos.append(jobInfo)

        elif fromDate is not None and toDate is not None:
            if timeCreated >= fromDate and timeCreated <= toDate:
                job_infos.append(jobInfo)

        elif fromDate is not None and toDate is None:
            if timeCreated >= fromDate:
                job_infos.append(jobInfo)

    return job_infos

#increment dictionary value method
def increment(d, k):
    d.setdefault(k, 0)
    d[k] += 1

def get_accumulated_subjobs_JSON(subjobs):

    completed_dates = []

    for subjob in subjobs:
        if subjob.status == 'completed':
            completed_dates.append(subjob.time.timestamps['final'])

    if len(completed_dates) == 0:
        return ''

    completed_dates.sort()

    start_date = completed_dates[0]
    end_date = completed_dates[-1]

    interval = (end_date - start_date).seconds

    if interval == 0:
        interval = 1

    ratio = 100.0 / interval

    seconds_from_start = []

    scale = len(completed_dates) / 20

    for completed_date in completed_dates:
        seconds_from_start.append(((completed_date - start_date).seconds) * ratio)

    values = []
    for i in range(len(completed_dates)):
        values.append(i + 1)

    reduced_values = []
    reduced_seconds_from_start = []

    ratio1 = 100.0 / len(completed_dates)

    for i in range(len(values)):
        if scale == 0:
            reduced_values.append(str(values[i] * ratio1))
            reduced_seconds_from_start.append(str(seconds_from_start[i]))
        elif (i % scale == 0):
            reduced_values.append(str(values[i] * ratio1))
            reduced_seconds_from_start.append(str(seconds_from_start[i]))

    reduced_values.append(str(values[-1] * ratio1))
    reduced_seconds_from_start.append(str(seconds_from_start[-1]))

    if interval == 1:
        reduced_values = ["0", "100"]
        reduced_seconds_from_start = ["0", "100"]

    returnJSON = "{\"chxl\":\"0:|" + start_date.strftime("%Y-%m-%d %H:%M:%S") + "|" + end_date.strftime("%Y-%m-%d %H:%M:%S") + "\"," + "\"chd\":\"t:" + ','.join(reduced_seconds_from_start) + "|" + ','.join(reduced_values) + "\",\"chxr\":\"1,0," + str(values[-1]) + "\"}"

    return returnJSON

def create_subjobs_graphics(jobid, subjob_attribute, fromDate, toDate):

    subjobs_in_time_range = get_subjobs_in_time_range(jobid, fromDate, toDate)

    if subjob_attribute == 'accumulate':
        #return some JSON here
        return get_accumulated_subjobs_JSON(subjobs_in_time_range)

    subjobs_attributes = {}

    for subjob in subjobs_in_time_range:

        if subjob_attribute == 'status':
            increment(subjobs_attributes, subjob.status)

        elif subjob_attribute == 'application':
            increment(subjobs_attributes, subjob.application.__class__.__name__)

        elif subjob_attribute == 'backend':
            increment(subjobs_attributes, subjob.backend.__class__.__name__)

        elif subjob_attribute == 'actualCE':
            increment(subjobs_attributes, subjob.backend.actualCE)

    if subjob_attribute == 'status':
        return get_pie_chart_json(subjobs_attributes, colors=True, jobs=False)
    else:
        return get_pie_chart_json(subjobs_attributes)

def get_pie_chart_json(d, colors=False, jobs=False):

    #template = "{\"chd\":\"t:50,50\",\"chl\":\"Hello|World\"}"

    if len(d) == 0:
        return "{\"chd\":\"t:1\",\"chl\":\"no data\"}"

    keys = []
    values = []

    for k, v in d.iteritems():
        keys.append(k)
        values.append(v)

    keyString = ""

    for key in keys[:-1]:
        keyString += '%s|' % key
    keyString += keys[-1]

    valueString = ""

    for value in values[:-1]:
        valueString += '%s,' % str(value)
    valueString += str(values[-1])

    result_json = ""

    if colors:
        colorString = getColorString(keys, jobs)
        result_json = "{\"chd\":\"t:%s\",\"chl\":\"%s\",\"chco\":\"%s\"}" % (valueString, keyString, colorString)
    else:
        result_json = "{\"chd\":\"t:%s\",\"chl\":\"%s\"}" % (valueString, keyString)

    return result_json
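
# Example (illustrative, not part of the original module): for a dictionary such
# as {'running': 3, 'failed': 1} the function builds Google Chart style
# parameters, roughly (key order depends on dict iteration order):
#
#   get_pie_chart_json({'running': 3, 'failed': 1}, colors=True, jobs=True)
#   -> '{"chd":"t:3,1","chl":"running|failed","chco":"008000|FF0000"}'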

def create_jobs_graphics(job_attribute, fromDate=None, toDate=None):

    job_infos_in_time_range = get_job_infos_in_time_range(fromDate, toDate)

    jobs_attribute = {}

    for jobInfo in job_infos_in_time_range:

        if job_attribute == 'status':
            increment(jobs_attribute, jobInfo.getJobStatus())
        elif job_attribute == 'application':
            increment(jobs_attribute, jobInfo.getJobApplication())
        elif job_attribute == 'backend':
            increment(jobs_attribute, jobInfo.getJobBackend())

    if job_attribute == 'status':
        return get_pie_chart_json(jobs_attribute, colors=True, jobs=True)
    else:
        return get_pie_chart_json(jobs_attribute)

def get_jobs_JSON(fromDate=None, toDate=None):

    json_jobs_strings = []
    json_jobs_strings.append("{\"user_taskstable\": [")

    job_infos_in_time_range = get_job_infos_in_time_range(fromDate, toDate)

    for jobInfo in job_infos_in_time_range:

        json_jobs_strings.append(jobInfo.getJobJSON())
        json_jobs_strings.append(",")

    if json_jobs_strings[-1] == ",":
        json_jobs_strings = json_jobs_strings[:-1]

    json_jobs_strings.append("]}")

    return "".join(json_jobs_strings)

def update_jobs_dictionary():

    reg = getRegistry("jobs")
    #get the changed jobs
    changed_ids = reg.pollChangedJobs("WebGUI")

    for job_id in changed_ids:
        try:

            job = jobs(job_id)

            try:
                jobs_dictionary[job_id] = JobRelatedInfo(job, job.time.timestamps['new'])
            except RegistryKeyError:
                jobs_dictionary[job_id] = JobRelatedInfo(job, None)

        except RegistryKeyError:

            del jobs_dictionary[job_id]

def fill_jobs_dictionary():

    for job in jobs:
        try:
            #get the id -> it could cause RegistryKeyError and the code below will not be executed
            jobid = job.id

            try:
                jobs_dictionary[jobid] = JobRelatedInfo(job, job.time.timestamps['new'])
            except RegistryKeyError:
                jobs_dictionary[jobid] = JobRelatedInfo(job, None)

        except RegistryKeyError:
            pass

#todo remove
def saveProcessDetails():

    if not os.path.exists(tempFilePath):
        file = open(tempFilePath, 'w')
        try:
            file.write(str(os.getpid()))
            file.write('\n')
            file.write(Ganga.Utility.util.hostname())
        finally:
            file.close()

#todo remove
def getProcessDetails():

    file = open(tempFilePath, 'r')
    try:
        lines = file.readlines()
        pid = lines[0].strip()
        hostname = lines[1].strip()

        return (pid, hostname)
    finally:
        file.close()

def getHttpServer():

    success = False
    port = httpServerStartTryPort
    server = None

    while not success:

        try:
            server = HTTPServer((httpServerHost, port), GetHandler)
            success = True
        except Exception:
            port += 1

    return server, port

def convertStringToDatetime(timestring):

    time_format = "%Y-%m-%d %H:%M"
    return datetime.datetime.fromtimestamp(time.mktime(time.strptime(timestring, time_format)))
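
# Example (illustrative, not part of the original module):
#
#   convertStringToDatetime("2010-09-24 15:56")
#   -> datetime.datetime(2010, 9, 24, 15, 56)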

def getFromDateFromTimeRange(timeRange):

    fromDay = None
    today = datetime.date.today()

    if timeRange == 'lastDay':
        fromDay = today
    elif timeRange == 'last2Days':
        fromDay = today + datetime.timedelta(days=-1)
    elif timeRange == 'last3Days':
        fromDay = today + datetime.timedelta(days=-2)
    elif timeRange == 'lastWeek':
        fromDay = today + datetime.timedelta(days=-6)
    elif timeRange == 'last2Weeks':
        fromDay = today + datetime.timedelta(days=-13)
    elif timeRange == 'lastMonth':
        fromDay = today + datetime.timedelta(days=-30)

    fromDateTime = datetime.datetime(fromDay.year, fromDay.month, fromDay.day, 0, 0)
    return fromDateTime
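
# Example (illustrative, not part of the original module): if today were
# 2010-09-24, getFromDateFromTimeRange('lastWeek') would return
# datetime.datetime(2010, 9, 18, 0, 0), i.e. midnight six days earlier.
# Note that an unrecognised timeRange leaves fromDay as None and raises here.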

# ... (a few lines of the original source are not shown in this listing; the
# getMonitoringLink() helper used below is presumably defined in that block) ...

class JobRelatedInfo:

    def __init__(self, job, time_created):

        self.job_json = get_job_JSON(job)
        self.time_created = time_created
        self.job_status = job.status
        self.job_application = job.application.__class__.__name__
        self.job_backend = job.backend.__class__.__name__

    def getJobJSON(self):

        return self.job_json

    def getTimeCreated(self):

        return self.time_created

    def getJobStatus(self):

        return self.job_status

    def getJobApplication(self):

        return self.job_application

    def getJobBackend(self):

        return self.job_backend

    def __hash__(self):

        return hash(self.job_json) + hash(self.time_created)

    def __eq__(self, other):

        return isinstance(other, JobRelatedInfo) and self.job_json == other.job_json and self.time_created == other.time_created

class HTTPServerThread(GangaThread):

    def __init__(self, name):
        #original line not shown in this listing; presumably the GangaThread constructor is called with the thread name
        GangaThread.__init__(self, name=name)

    def run(self):

        server, port = getHttpServer()
        server.socket.settimeout(1)
        reg = getRegistry("jobs")

        """
        try:
            server = HTTPServer(('pclcg35.cern.ch', 1234), GetHandler)
            server.socket.settimeout(1)
        except Exception:
            return

        print "Another Ganga session is already started with --webgui option"
        process_details = getProcessDetails()

        print reg.repository.get_other_sessions()
        print "Process id : %s, hostname : %s" % (process_details[0], process_details[1])

        self.stop()
        self.unregister()
        return
        """

        print 'Starting web gui monitoring server, please wait ...'

        # initialization

        reg = getRegistry("jobs")
        #calling here first time will take all jobs
        reg.pollChangedJobs("WebGUI")
        #fill jobs dictionary at the beginning
        fill_jobs_dictionary()

        print 'Web gui monitoring server started successfully'
        print
        print 'You can monitor your jobs on the following link: ' + getMonitoringLink(port)

        #server.serve_forever()

        try:
            while not self.should_stop():
                server.handle_request()
        finally:
            pass
            #print "stopping HTTP server thread"
            #os.remove(tempFilePath)
            #server.server_close()

class GetHandler(BaseHTTPRequestHandler):

    def log_message(self, format, *args):

        logger.debug(format % args)
    def do_GET(self):
        queryString = self.path.split('?')[1]
        import cgi
        qsDict = dict(cgi.parse_qsl(queryString))
        query = qsDict['list']

        fromDate = None
        toDate = None

        #from and to date are either both selected or both not selected
        if qsDict.has_key('from') and qsDict.has_key('to'):
            fromDate = convertStringToDatetime(qsDict['from'])
            toDate = convertStringToDatetime(qsDict['to'])
        #if from and to are not selected, it could be timeRange selected
        elif qsDict.has_key('timerange'):
            fromDate = getFromDateFromTimeRange(qsDict['timerange'])

        json = ''

        if query == "users":
            json = get_users_JSON()
        elif query == "jobs":
            #update dictionary with the changed jobs
            update_jobs_dictionary()
            json = get_jobs_JSON(fromDate, toDate)

        elif query == "subjobs":
            jobid = int(qsDict['taskmonid'])
            json = get_subjobs_JSON(jobid, fromDate, toDate)

        elif query == "jobs_statuses":
            #update dictionary with the changed jobs
            update_jobs_dictionary()
            json = create_jobs_graphics('status', fromDate, toDate)

        elif query == "jobs_backends":
            #update dictionary with the changed jobs
            update_jobs_dictionary()
            json = create_jobs_graphics('backend', fromDate, toDate)

        elif query == "jobs_applications":
            #update dictionary with the changed jobs
            update_jobs_dictionary()
            json = create_jobs_graphics('application', fromDate, toDate)

        elif query == "subjobs_statuses":
            jobid = int(qsDict['taskmonid'])
            json = create_subjobs_graphics(jobid, 'status', fromDate, toDate)

        elif query == "subjobs_backends":
            jobid = int(qsDict['taskmonid'])
            json = create_subjobs_graphics(jobid, 'backend', fromDate, toDate)

        elif query == "subjobs_applications":
            jobid = int(qsDict['taskmonid'])
            json = create_subjobs_graphics(jobid, 'application', fromDate, toDate)

        elif query == "subjobs_actualCE":
            jobid = int(qsDict['taskmonid'])
            json = create_subjobs_graphics(jobid, 'actualCE', fromDate, toDate)

        elif query == "subjobs_accumulate":
            jobid = int(qsDict['taskmonid'])
            json = create_subjobs_graphics(jobid, 'accumulate', fromDate, toDate)

        elif query == "testaccumulation":

            json = "{\"totaljobs\": [[{\"TOTAL\": 92}], {\"taskmonid\": \"ganga:e60e5904-e63e-432f-b3df-63ca833cf080:\"}], \"procevents\": [[{\"NEventsPerJob\": 0}], {\"taskmonid\": \"ganga:e60e5904-e63e-432f-b3df-63ca833cf080:\"}], \"succjobs\": [[{\"TOTAL\": 92, \"TOTALEVENTS\": 1365491}], {\"taskmonid\": \"ganga:e60e5904-e63e-432f-b3df-63ca833cf080:\"}], \"meta\": {\"genactivity\": null, \"submissiontype\": null, \"site\": null, \"ce\": null, \"dataset\": null, \"submissiontool\": null, \"fail\": null, \"check\": [\"submitted\"], \"date1\": [\"2010-09-23 15:56:27\"], \"date2\": [\"2010-09-24 15:56:27\"], \"application\": null, \"rb\": null, \"status\": null, \"taskmonid\": [\"ganga:e60e5904-e63e-432f-b3df-63ca833cf080:\"], \"args\": \"<![CDATA[taskmonid=ganga%3Ae60e5904-e63e-432f-b3df-63ca833cf080%3A]]>\", \"grid\": null, \"user\": null, \"task\": null, \"unixname\": null, \"sortby\": [\"activity\"], \"activity\": null, \"exitcode\": null}, \"allfinished\": [[{\"finished\": \"2010-08-13 14:02:18\", \"Events\": 2000}, {\"finished\": \"2010-08-13 14:39:13\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:39:25\", \"Events\": 14350}, {\"finished\": \"2010-08-13 14:39:58\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:40:03\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:40:18\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:40:19\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:40:37\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:40:38\", \"Events\": 14994}, {\"finished\": \"2010-08-13 14:40:52\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:40:53\", \"Events\": 14996}, {\"finished\": \"2010-08-13 14:40:54\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:41:25\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:41:27\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:41:29\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:41:32\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:41:32\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:41:34\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:41:35\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:41:43\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:41:44\", \"Events\": 14996}, {\"finished\": \"2010-08-13 14:41:45\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:41:53\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:41:54\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:41:55\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:41:55\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:41:55\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:41:55\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:41:55\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:41:59\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:42:03\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:42:03\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:42:04\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:42:06\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:42:07\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:42:14\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:42:14\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:42:27\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:42:27\", \"Events\": 14995}, {\"finished\": \"2010-08-13 14:42:28\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:42:38\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:42:53\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:42:54\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:42:57\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:42:57\", \"Events\": 14995}, {\"finished\": \"2010-08-13 14:42:58\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:42:58\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:43:01\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:02\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:04\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:04\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:11\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:15\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:15\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:43:17\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:22\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:23\", \"Events\": 14996}, {\"finished\": \"2010-08-13 14:43:24\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:25\", \"Events\": 14996}, {\"finished\": \"2010-08-13 14:43:28\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:43:32\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:36\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:43:36\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:43:39\", \"Events\": 14996}, {\"finished\": \"2010-08-13 14:43:43\", \"Events\": 14996}, {\"finished\": \"2010-08-13 14:43:56\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:43:57\", \"Events\": 14299}, {\"finished\": \"2010-08-13 14:43:57\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:44:04\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:44:15\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:44:15\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:44:34\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:44:35\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:44:35\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:44:35\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:44:36\", \"Events\": 14995}, {\"finished\": \"2010-08-13 14:45:03\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:45:10\", \"Events\": 14998}, {\"finished\": \"2010-08-13 14:45:25\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:45:26\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:45:45\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:45:50\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:45:50\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:46:01\", \"Events\": 14997}, {\"finished\": \"2010-08-13 14:46:07\", \"Events\": 14996}, {\"finished\": \"2010-08-13 14:46:14\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:46:23\", \"Events\": 14999}, {\"finished\": \"2010-08-13 14:46:26\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:46:30\", \"Events\": 15000}, {\"finished\": \"2010-08-13 14:47:09\", \"Events\": 14999}, {\"finished\": \"2010-08-13 15:57:09\", \"Events\": 14996}, {\"finished\": \"2010-08-13 16:17:45\", \"Events\": 14997}], {\"taskmonid\": \"ganga:e60e5904-e63e-432f-b3df-63ca833cf080:\"}], \"lastfinished\": [[{\"finished\": \"2010-08-13 16:17:45\"}], {\"taskmonid\": \"ganga:e60e5904-e63e-432f-b3df-63ca833cf080:\"}], \"firststarted\": [[{\"started\": \"2010-08-13 13:51:21\"}], {\"taskmonid\": \"ganga:e60e5904-e63e-432f-b3df-63ca833cf080:\"}]}"

        self.send_response(200)
        self.send_header('Content-Type', 'text/html')
        self.end_headers()

        jsonp_function = qsDict['jsonp_callback']
        result = "%s(%s);" % (jsonp_function, json)
        self.wfile.write(result)

        return

jobs_dictionary = {}
httpServerHost = 'localhost'
httpServerStartTryPort = 8080

#todo remove
#import os
#tempFilePath = os.path.join(config.Configuration.gangadir, 'process')
#saveProcessDetails()
#end remove

def start_server():

    t = HTTPServerThread("HTTP_monitoring")
    t.start()


if __name__ == '__main__':
    start_server()
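
# Minimal usage sketch (illustrative, not part of the original module; assumes a
# Ganga session in which this module is importable and the default port is free):
#
#   from Ganga.Runtime import http_server
#   http_server.start_server()             # starts the HTTPServerThread
#
#   import urllib2
#   url = 'http://localhost:8080/?list=users&jsonp_callback=cb'
#   print urllib2.urlopen(url).read()      # -> cb({"basicData": [[{"GridName": "..."}]]});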