tvaLib
lib.int.UserPairs Class Reference
Inheritance diagram for lib.int.UserPairs: inherited by lib.int.VehicleSDRs.

Public Member Functions

def __init__ (self, objects, methodName='default', **kwargs)
 
def __len__ (self)
 
def __getitem__ (self, i)
 
def __setitem__ (self, i, value)
 
def __iter__ (self)
 
def __str__ (self)
 
def __add__ (self, userPairs)
 
def genPairs (self, objects, maxUserPairs=4000, max_distance=50.0, searchRange=50, verbose=0)
 
def genExposure (self, objFrameTable, label='300 frames', frames=300)
 
def importPairs (self, paths, version=None, filename='')
 
def exportPairs (self, path, version=None, filename='')
 
def getInstantCount (self)
 
def getInstantWIndicatorCount (self)
 
def getIntWTTCCount (self)
 
def calculateIndicators (self, predictionMethod, triage=False, threads=1, collisionDistanceThreshold=1.5, timeHorizon=100, chunkSize=100, shortcircuit=True, verbose=0)
 
def computeCrossingsCollisionsWorkerCallback (self, packagedData)
 
def distributeIndicatorWorkerResults (self)
 
def getPointList (self, method=0, percentile=0.15, minimumProbability=0.0, format='points', userType1=None, userType2=None, originLane1=None, originLane2=None, destLane1=None, destLane2=None, **kwargs)
 
def genIndicatorDistribution (self, indicator_list=None, dist_type='pdf', bins=range(0, 100, 1), normalise=False, **kwargs)
 
def dropSafetyIndicatorsByValue (self, **kwargs)
 
def constrainToZone (self, zone)
 
def garbageCollectObjectData (self)
 
def repopulateObjectData (self, objects, graceful=True)
 
def merge (self, source)
 

Public Attributes

 interactionCount
 Running count of interactions. More...
 
 intWTTCmap
 
 skipped_rate_none
 
 skipped_rate_part
 
 skipped_rate_full
 
 skipped_rate_avg
 Average skipping rate reported by the indicator workers. More...
 
 methodName
 
 detectedSize
 Number of candidate user pairs detected by genPairs(). More...
 
 usedSize
 
 data
 List of Interaction objects (the user pairs). More...
 
 chunks
 Number of chunks used by calculateIndicators(). More...
 
 chunk
 Index of the chunk currently being processed. More...
 
 data_
 Temporary storage for unmerged indicator worker results. More...
 
 verbose
 Verbosity level used during indicator calculation. More...
 
 prog
 

Detailed Description

Definition at line 300 of file int.py.

Constructor & Destructor Documentation

◆ __init__()

def lib.int.UserPairs.__init__ (   self,
  objects,
  methodName = 'default',
  **kwargs 
)
Create all pairs of two co-existing road users
    TODO: add test to compute categories?

Definition at line 301 of file int.py.

301  def __init__(self, objects, methodName='default', **kwargs):
302  ''' Create all pairs of two co-existing road users
303  TODO: add test to compute categories?
304  '''
305 
306  self.interactionCount = 0
307  self.intWTTCmap = []
308  self.skipped_rate_none = 0
309  self.skipped_rate_part = 0
310  self.skipped_rate_full = 0
311  self.skipped_rate_avg = 0
312  self.methodName = methodName
313  self.detectedSize = 0
314  self.usedSize = 0
315 
316 
317  self.genPairs(objects, **kwargs)
318 
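A minimal usage sketch (not from the source): it assumes objects is a list of tracked road-user trajectories loaded elsewhere with Traffic Intelligence and that lib.int is importable; any extra keyword arguments are forwarded to genPairs().

from lib.int import UserPairs

# objects: list of tracked road users loaded elsewhere (assumed available).
# Keyword arguments beyond methodName are forwarded to genPairs().
userPairs = UserPairs(objects, methodName='default',
                      maxUserPairs=4000,      # cap on retained pairs
                      max_distance=50.0,      # max separation, in coordinate units
                      searchRange=50,         # look-ahead window in the object list
                      verbose=2)
print(str(userPairs.detectedSize)+' pairs detected, '+str(userPairs.usedSize)+' retained')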

Member Function Documentation

◆ __add__()

def lib.int.UserPairs.__add__ (   self,
  userPairs 
)

Definition at line 324 of file int.py.

324  def __add__(self, userPairs):
325  self.data += userPairs.data
326  self.interactionCount += userPairs.interactionCount
327  self.intWTTCmap += userPairs.intWTTCmap
328  self.skipped_rate_none += userPairs.skipped_rate_none
329  self.skipped_rate_part += userPairs.skipped_rate_part
330  self.skipped_rate_full += userPairs.skipped_rate_full
331  self.skipped_rate_avg += userPairs.skipped_rate_avg
332  self.methodName = userPairs.methodName
333  try:
334  self.detectedSize += userPairs.detectedSize
335  self.usedSize += userPairs.usedSize
336  except: pass
337 

◆ __getitem__()

def lib.int.UserPairs.__getitem__ (   self,
  i 
)

Definition at line 320 of file int.py.

320  def __getitem__(self, i): return self.data[i]

◆ __iter__()

def lib.int.UserPairs.__iter__ (   self)

Definition at line 322 of file int.py.

322  def __iter__(self): return iter(self.data)

◆ __len__()

def lib.int.UserPairs.__len__ (   self)

Definition at line 319 of file int.py.

319  def __len__(self): return len(self.data)

◆ __setitem__()

def lib.int.UserPairs.__setitem__ (   self,
  i,
  value 
)

Definition at line 321 of file int.py.

321  def __setitem__(self, i, value): self.data[i] = value; return True

◆ __str__()

def lib.int.UserPairs.__str__ (   self)

Definition at line 323 of file int.py.

323  def __str__(self): return 'List of interactions (currently storing '+str(len(self.data))+')'

◆ calculateIndicators()

def lib.int.UserPairs.calculateIndicators (   self,
  predictionMethod,
  triage = False,
  threads = 1,
  collisionDistanceThreshold = 1.5,
  timeHorizon = 100,
  chunkSize = 100,
  shortcircuit = True,
  verbose = 0 
)
Process indicator calculation with support for multithreading. This function splits the userPairs load into chunks and multithreads the calculation.

    Use this function's shortcircuit flag (typically used as a manual override) to disable multithreading entirely if it is causing problems or if the callback needs to be debugged.

Definition at line 409 of file int.py.

409  def calculateIndicators(self, predictionMethod, triage=False, threads=1, collisionDistanceThreshold=1.5, timeHorizon=100, chunkSize=100, shortcircuit=True, verbose=0):
410  ''' Process indicator calculation with support for multithreading. This function splits the userPairs load into chunks and multithreads the calculation.
411 
412  Use this function's shortcircuit flag (typically used as a manual override) to disable multithreading entirely if it is causing problems or if the callback needs to be debugged.
413  '''
414 
415  tvaLib.prepareInstanceMethodForPickling()
416 
417 
418  self.chunks = int(m.ceil(len(self)/float(chunkSize)))
419  self.chunk = 0
420  self.data_ = []
421  self.verbose = verbose
422  if(self.verbose): self.prog = tvaLib.ProgressBar(0, self.chunks+1, 77)
423  if(self.verbose == 1): self.prog.updateAmount(self.chunk)
424  #verbose = multiprocessing.Value('i', verbose)
425  #timeHorizon = multiprocessing.Value('i', timeHorizon)
426  #collisionDistanceThreshold = multiprocessing.Value('d', collisionDistanceThreshold)
427  #import sys; import dev.memoryUsage as memUs; print('Memory usage in bytes; referential: {0}, real: {1}'.format(sys.getsizeof(predictionMethod['prediction'].data),memUs.asizeof(predictionMethod['prediction'].data)))
428 
429  while self.chunk < self.chunks:
430  # Manual override to short-circuit multithreading if it is causing problems
431  if(shortcircuit or threads==1):
432  if(self.chunk == self.chunks-1): terminatorIx = len(self)
433  else: terminatorIx = int((self.chunk+1)*chunkSize)
434  packagedData = computeCrossingsCollisionsWorker(self[int(self.chunk*chunkSize):terminatorIx], self.chunk*chunkSize, predictionMethod, triage, collisionDistanceThreshold, timeHorizon, verbose=verbose)
435  self.computeCrossingsCollisionsWorkerCallback(packagedData)
436  self.chunk += 1
437  # Multithreading
438  else:
439  pool = multiprocessing_Pool(threads)
440  for thread in range(threads):
441  if(self.chunk >= self.chunks): continue
442  elif(self.chunk == self.chunks-1): terminatorIx = len(self)
443  else: terminatorIx = int((self.chunk+1)*chunkSize)
444  pool.apply_async(computeCrossingsCollisionsWorker, args = (self[int(self.chunk*chunkSize):terminatorIx], self.chunk*chunkSize, predictionMethod, triage, collisionDistanceThreshold, timeHorizon, verbose), callback = self.computeCrossingsCollisionsWorkerCallback)
445  self.chunk += 1
446  pool.close()
447  pool.join()
448 
449 
450  if(verbose >= 2): print(' Distributing indicator calculations in memory...')
451  self.distributeIndicatorWorkerResults()
452  return True
453 
def computeCrossingsCollisionsWorker(userPairs, objOffset, predictionMethod, triage=False, collisionDistanceThreshold=1.8, timeHorizon=100, verbose=0)
Definition: int.py:242
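A hedged usage sketch (not from the source); predictionMethod is assumed to be one of the motion-prediction configurations prepared elsewhere in tvaLib, and userPairs is an existing instance as in the constructor example above.

# predictionMethod: motion-prediction configuration prepared elsewhere in
# tvaLib (assumed available). shortcircuit=True forces single-threaded
# execution, which is the easiest way to debug the worker callback.
userPairs.calculateIndicators(predictionMethod,
                              threads=4,
                              chunkSize=100,
                              collisionDistanceThreshold=1.5,
                              timeHorizon=100,
                              shortcircuit=False,    # allow multithreading
                              verbose=1)
print(str(userPairs.getIntWTTCCount())+' user pairs received collision-point indicators')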

◆ computeCrossingsCollisionsWorkerCallback()

def lib.int.UserPairs.computeCrossingsCollisionsWorkerCallback (   self,
  packagedData 
)
Perform end of chunk tasks: data storage, progress updates, and
    compilation of statistics.

    Store indicator calculations temporarily to self.data_ to be
    handled by distributeIndicatorWorkerResults() synchronously when all
    chunks have been computed.

    packagedData contains both results and skipped_rates from
    computeCrossingsCollisionsWorker() (it is a list).

Definition at line 454 of file int.py.

454  def computeCrossingsCollisionsWorkerCallback(self, packagedData):
455  ''' Perform end of chunk tasks: data storage, progress updates, and
456  compilation of statistics.
457 
458  Store indicator calculations temporarily to self.data_ to be
459  handled by distributeIndicatorWorkerResults() synchronously when all
460  chunks have been computed.
461 
462  packagedData contains both results and skipped_rates from
463  computeCrossingsCollisionsWorker() (it is a list) '''
464 
465  self.data_ += packagedData[0]
466 
467  if(len(packagedData[1])):
468  oldTotal = sum([self.skipped_rate_none, self.skipped_rate_part, self.skipped_rate_full])
469  self.skipped_rate_avg = (self.skipped_rate_avg*oldTotal+sum(packagedData[1]))/(oldTotal+len(packagedData[1]))
470  for sk in packagedData[1]:
471  if(sk <= 0): self.skipped_rate_none += 1
472  elif(sk >= 1): self.skipped_rate_full += 1
473  else: self.skipped_rate_part += 1
474 
475  if(self.verbose >= 1):
476  if(self.verbose == 1): self.prog.updateAmount(self.chunk)
477  elif(self.verbose >= 2): print(' Chunk '+str(self.chunk)+' of '+str(self.chunks)+' chunks completed.')
478  return True
479 
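For reference, a small sketch of the packagedData layout consumed above; the 'ix'/'CP'/'CZ' keys and the two-element list structure follow the code in this section, while the concrete values are placeholders.

# Illustrative placeholder only: one result dict per user pair plus the
# per-pair skipping rates, as unpacked by the callback above.
packagedData = [
    [{'ix': 0, 'CP': [], 'CZ': []},    # 'ix' indexes into self.data
     {'ix': 1, 'CP': []}],             # 'CZ' may be absent for some pairs
    [0.0, 0.4],                        # per-pair skipped rates (0 = none skipped)
]
results, skipped_rates = packagedData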

◆ constrainToZone()

def lib.int.UserPairs.constrainToZone (   self,
  zone 
)
Drop any collision points of any user pairs outside of the given zone.

Definition at line 664 of file int.py.

664  def constrainToZone(self, zone):
665  ''' Drop any collision points of any user pairs outside of the given zone. '''
666  drop_length = 0
667  for userPair in self:
668  drop_length += userPair.constrainToZone(zone)
669  return drop_length
670 
671 

◆ distributeIndicatorWorkerResults()

def lib.int.UserPairs.distributeIndicatorWorkerResults (   self)
This function is called at the end of calculateIndicators() to
    distribute indicator results stored temporarily by 
    computeCrossingsCollisionsWorkerCallback(). It is necessary to
    perform this step synchronously.
    
    self.data_ is a list of unmerged results, one result per user pair.
    self.data_[i]['ix'] is the corresponding user pair (self.data) ix 
    self.data_[i]['CP'] contains the calculated collision points
    self.data_[i]['CZ'] contains the calculated crossing zones

Definition at line 480 of file int.py.

480  def distributeIndicatorWorkerResults(self):
481  ''' This function is called at the end of calculateIndicators() to
482  distribute indicator results stored temporarily by
483  computeCrossingsCollisionsWorkerCallback(). It is necessary to
484  perform this step synchronously.
485 
486  self.data_ is a list of unmerged results, one result per user pair.
487  self.data_[i]['ix'] is the corresponding user pair (self.data) ix
488  self.data_[i]['CP'] contains the calculated collision points
489  self.data_[i]['CZ'] contains the calculated crossing zones
490  '''
491  if(not self.data_): return False
492 
493  for dIx in range(len(self.data_)):
494  if('CP' in self.data_[dIx]):
495  self[self.data_[dIx]['ix']].collisionPoints = self.data_[dIx]['CP']
496  self.intWTTCmap.append(self.data_[dIx]['ix'])
497  if('CZ' in self.data_[dIx]):
498  self[self.data_[dIx]['ix']].crossingZones = self.data_[dIx]['CZ']
499  self[self.data_[dIx]['ix']].genSafetyIndicators()
500  self[self.data_[dIx]['ix']].genInteractionDescriptors()
501  self.data_ = []
502  return True
503 

◆ dropSafetyIndicatorsByValue()

def lib.int.UserPairs.dropSafetyIndicatorsByValue (   self,
  **kwargs 
)
Recursively delete indicators of any user pairs with values below
    indicatorLowerBound or above indicatorUpperBound.

Definition at line 656 of file int.py.

656  def dropSafetyIndicatorsByValue(self, **kwargs):
657  ''' Recursively delete indicators of any user pairs with values below
658  indicatorLowerBound or above indicatorUpperBound. '''
659  drop_length = 0
660  for userPair in self:
661  drop_length += userPair.dropSafetyIndicatorsByValue(**kwargs)
662  return drop_length
663 
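A short usage sketch (not from the source); the bound keyword names follow the docstring above, and the numeric limits are placeholders.

# Discard indicator values outside a plausible range; the bounds shown
# here are placeholders, not recommended values.
dropped = userPairs.dropSafetyIndicatorsByValue(indicatorLowerBound=0.0,
                                                indicatorUpperBound=10.0)
print(str(dropped)+' safety indicators dropped')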

◆ exportPairs()

def lib.int.UserPairs.exportPairs (   self,
  path,
  version = None,
  filename = '' 
)
Export the contents of this instance. 

Definition at line 389 of file int.py.

389  def exportPairs(self, path, version=None, filename=''):
390  ''' Export the contents of this instance. '''
391  if(not filename): filename = 'userPairs('+self.methodName+').pva'
392  with open(os.path.join(path, filename), 'wb') as output:
393  pickle.dump(version, output, protocol=2)
394  pickle.dump(self.__dict__, output, protocol=2)
395  return True
396 
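A round-trip sketch (not from the source), assuming userPairs exists as in the earlier examples and that the analysis directory is writable; the version string is an arbitrary placeholder that importPairs() checks against the pickled value.

analysisPath = '/path/to/analysis'                 # hypothetical output folder
userPairs.exportPairs(analysisPath, version='1.0') # writes userPairs(default).pva

# Later, restore into a fresh (empty) instance from the same folder.
restored = UserPairs([], methodName='default')
if not restored.importPairs([analysisPath], version='1.0'):
    print('No compatible userPairs(default).pva found in '+analysisPath)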

◆ garbageCollectObjectData()

def lib.int.UserPairs.garbageCollectObjectData (   self)
Drop the embedded road-user object data (e.g. before pickling); use
    repopulateObjectData() to rebuild it afterwards.

Definition at line 672 of file int.py.

672  def garbageCollectObjectData(self):
673  ''' Drop the embedded road-user object data (e.g. before pickling); use
674  repopulateObjectData() to rebuild it afterwards. '''
675  for i in range(len(self)):
676  #try: self[i].sequenceTrace = self[i].roadUser1.file
677  #except: self[i].sequenceTrace = None
678  self[i].roadUser1 = None
679  self[i].roadUser2 = None
680  return True
681 

◆ genExposure()

def lib.int.UserPairs.genExposure (   self,
  objFrameTable,
  label = '300 frames',
  frames = 300 
)

Definition at line 368 of file int.py.

368  def genExposure(self, objFrameTable, label='300 frames', frames=300):
369  for i in range(len(self.data)):
370  try: self.data[i].exposure
371  except AttributeError: self.data[i].exposure = {}
372  self.data[i].exposure[label] = objFrameTable.getExposureAtInstant(self.data[i].getFirstInstant(), frames=frames)
373  return
374 
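A brief usage sketch (not from the source); objFrameTable is assumed to be the frame-count table built elsewhere in tvaLib, exposing getExposureAtInstant() as used above.

# Attach a 300-frame exposure measurement to every user pair.
userPairs.genExposure(objFrameTable, label='300 frames', frames=300)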

◆ genIndicatorDistribution()

def lib.int.UserPairs.genIndicatorDistribution (   self,
  indicator_list = None,
  dist_type = 'pdf',
  bins = range(0,100,1),
  normalise = False,
  **kwargs 
)
Generate indicator distribution. 

    Options:
    ========
    dist_type='pdf'  #Probability distribution function (default)
    dist_type='cdf'  #Cumulative distribution function
    dist_type='freq' #Frequency (plain histogram)
    
    Output:
    =======
    Returns two lists: a list of bin coordinates (x-axis) and a list of
    the corresponding pdf/cdf/freq value (y-axis) for the corresponding
    bin.
    [[x_bin_1,x_bin_2,...],[y_bin_value_1,y_bin_value_2,...]]

Definition at line 617 of file int.py.

617  def genIndicatorDistribution(self, indicator_list=None, dist_type='pdf', bins=range(0,100,1), normalise=False, **kwargs):
618  ''' Generate indicator distribution.
619 
620  Options:
621  ========
622  dist_type='pdf' #Probability distribution function (default)
623  dist_type='cdf' #Cumulative distribution function
624  dist_type='freq' #Frequency (plain histogram)
625 
626  Output:
627  =======
628  Returns two lists: a list of bin coordinates (x-axis) and a list of
629  the corresponding pdf/cdf/freq value (y-axis) for the corresponding
630  bin.
631  [[x_bin_1,x_bin_2,...],[y_bin_value_1,y_bin_value_2,...]]
632  '''
633 
634  if(not indicator_list): indicator_list = self.getPointList(**kwargs)
635  if(not indicator_list): return False
636 
637 
638  indicator_value_list = []
639  for i in indicator_list:
640  if(normalise): indicator_value_list.append(i[0]*i[3])
641  else: indicator_value_list.append(i[0])
642  histo = np.histogram(indicator_value_list, bins=bins)
643  histo = [histo[1].tolist(),histo[0].tolist()]
644 
645  try:
646 
647  if(dist_type=='freq'): return histo
648  else:
649  histo[1] = [x/float(np.sum(histo[1])) for x in histo[1]]
650  if(dist_type=='cdf'):
651  for histoIx in range(1, len(histo[1])):
652  histo[1][histoIx] += histo[1][histoIx-1]
653  return histo
654  except ZeroDivisionError: return [[],[]]
655 
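A hedged usage sketch (not from the source); bins are in the indicator's own units and any extra keyword arguments are forwarded to getPointList().

# Cumulative distribution of indicator values; extra kwargs (here
# minimumProbability) are forwarded to getPointList().
dist = userPairs.genIndicatorDistribution(dist_type='cdf',
                                          bins=range(0, 100, 1),
                                          minimumProbability=0.1)
if dist:
    bin_edges, cdf_values = dist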

◆ genPairs()

def lib.int.UserPairs.genPairs (   self,
  objects,
  maxUserPairs = 4000,
  max_distance = 50.0,
  searchRange = 50,
  verbose = 0 
)
Generate user pairs. 

Definition at line 338 of file int.py.

338  def genPairs(self, objects, maxUserPairs=4000, max_distance=50.0, searchRange=50, verbose=0):
339  ''' Generate user pairs. '''
340  self.data = []
341  num = 0
342  distanceSquared = max_distance**2
343  for ix in range(len(objects)):
344  for jx in range(ix+1, min(ix+searchRange, len(objects))):
345  commonTimeInterval = objects[ix].commonTimeInterval(objects[jx])
346  if(not commonTimeInterval.empty()):
347 
348  for t in commonTimeInterval:
349  t1 = t-objects[ix].getFirstInstant()
350  t2 = t-objects[jx].getFirstInstant()
351  try:
352  if(tvaLib.Geo.ppdSearchSquared(objects[ix].getXCoordinates()[t1],objects[ix].getYCoordinates()[t1], objects[jx].getXCoordinates()[t2],objects[jx].getYCoordinates()[t2], distanceSquared)):
353 
354  self.data.append(Interaction(num, commonTimeInterval, objects[ix].num, objects[jx].num, objects[ix], objects[jx], user1Hash=objects[ix].hash, user2Hash=objects[jx].hash))
355  num += 1
356  break
357  except IndexError: pass
358  if(verbose >= 2): print(str(len(self.data))+' candidate user pairs detected')
359  self.detectedSize = len(self.data)
360 
361  if(len(self.data) > maxUserPairs):
362  if(verbose >= 2): print('Sample is too large. Pruning list of user pairs down systematically to '+str(maxUserPairs)+' user pairs.')
363  self.data = self.data[::int(m.ceil(len(self.data)/float(maxUserPairs)))]+self.data[1::int(m.ceil(len(self.data)/float(maxUserPairs)))][:int(maxUserPairs-len(self.data)/int(m.ceil(len(self.data)/float(maxUserPairs))))]
364  self.data.sort(key=lambda x: x.num, reverse=False)
365  self.usedSize = len(self.data)
366  return True
367 

◆ getInstantCount()

def lib.int.UserPairs.getInstantCount (   self)
Return count of instants. 

Definition at line 397 of file int.py.

397  def getInstantCount(self):
398  ''' Return count of instants. '''
399  return sum([userPair.getInstantCount() for userPair in self])
400 

◆ getInstantWIndicatorCount()

def lib.int.UserPairs.getInstantWIndicatorCount (   self)
Return count of instants with indicators. 

Definition at line 401 of file int.py.

401  def getInstantWIndicatorCount(self):
402  ''' Return count of instants with indicators. '''
403  return sum([userPair.getInstantWIndicatorCount() for userPair in self])
404 

◆ getIntWTTCCount()

def lib.int.UserPairs.getIntWTTCCount (   self)
Return count of pairs with indicators. 

Definition at line 405 of file int.py.

405  def getIntWTTCCount(self):
406  ''' Return count of pairs with indicators. '''
407  return len(self.intWTTCmap)
408 

◆ getPointList()

def lib.int.UserPairs.getPointList (   self,
  method = 0,
  percentile = 0.15,
  minimumProbability = 0.0,
  format = 'points',
  userType1 = None,
  userType2 = None,
  originLane1 = None,
  originLane2 = None,
  destLane1 = None,
  destLane2 = None,
  **kwargs 
)
Returns safetyPoints as a formatted list.

    Input:
    ======
    method=0 Return all indicators
    method=1 Return one indicator per pair according to percentile of indicator
percentile=0 minimum value
percentile=1 maximum value
    method=3 Return one indicator per pair according to percentile of prob
    minimumProbability -> indicators with probability lower than this
                  will be ignored
    
    Filters:
    =============
    userType1, userType2     -> Filter based on road user types
    originLane1, originLane2 -> Filter based on road user origin lane
    destLane1, destLane2     -> Filter based on road user destination lane
    
    
    kwargs Input:
    =============
    ptype             -> The type of point: CP|CZ
    alignRestrictions -> [[laneIx,Smin,Smax],...] A series of sections 
                 identified by align index and bounded between
                 curvilinear distances Smin and Smax. Only 
                 indicators originating from within these
                 sections will be returned.
                 This mode requires declaration of objects
    maxIndThreshold   -> Indicator values above this will be ignored.
    aggregateInstants -> For interaction-instants with more than one
                 indicator of the same type, return a weighted
                 average according to probability. Defaults to
                 True.
    
    
    Supported return formats:
    =========================
                  0      1   2   3     4     5     6     7          8
    format='points'  -> [[value1,px1,py1,prob1,time1,num11,num12,descriptor,exposure_list1],[value2,px2,...],...]
    format='columns' -> [[value1,value2,...],[px1,px2,...],...]

Definition at line 504 of file int.py.

504  def getPointList(self, method=0, percentile=0.15, minimumProbability=0.0, format='points', userType1=None, userType2=None, originLane1=None, originLane2=None, destLane1=None, destLane2=None, **kwargs):
505  ''' Returns safetyPoints as a formatted list.
506 
507  Input:
508  ======
509  method=0 Return all indicators
510  method=1 Return one indicator per pair according to percentile of indicator
511  percentile=0 minimum value
512  percentile=1 maximum value
513  method=3 Return one indicator per pair according to percentile of prob
514  minimumProbability -> indicators with probability lower than this
515  will be ignored
516 
517  Filters:
518  =============
519  userType1, userType2 -> Filter based on road user types
520  originLane1, originLane2 -> Filter based on road user origin lane
521  destLane1, destLane2 -> Filter based on road user destination lane
522 
523 
524  kwargs Input:
525  =============
526  ptype -> The type of point: CP|CZ
527  alignRestrictions -> [[laneIx,Smin,Smax],...] A series of sections
528  identified by align index and bounded between
529  curvilinear distances Smin and Smax. Only
530  indicators originating from within these
531  sections will be returned.
532  This mode requires declaration of objects
533  maxIndThreshold -> Indicator values above this will be ignored.
534  aggregateInstants -> For interaction-instants with more than one
535  indicator of the same type, return a weighted
536  average according to probability. Defaults to
537  True.
538 
539 
540  Supported return formats:
541  =========================
542  0 1 2 3 4 5 6 7 8
543  format='points' -> [[value1,px1,py1,prob1,time1,num11,num12,descriptor,exposure_list1],[value2,px2,...],...]
544  format='columns' -> [[value1,value2,...],[px1,px2,...],...]
545  '''
546 
547 
548  return_list = []
549  for userPair in self:
550  try: ut1 = userPair.roadUser1.getUserType()
551  except: ut1 = 0
552  try: ut2 = userPair.roadUser2.getUserType()
553  except: ut2 = 0
554 
556  if(userType1):
557  if(userType2):
558  if(userType1 != ut1 or userType2 != ut2):
559  if(userType1 != ut2 or userType2 != ut1):
560  continue
561  else:
562  if(userType1 != ut1):
563  if(userType1 != ut2):
564  continue
565  elif(userType2):
566  if(userType2 != ut1):
567  if(userType2 != ut2):
568  continue
569  # ...by origin
570  if(originLane1):
571  if(originLane2):
572  if(originLane1 != userPair.roadUser1.getCurvilinearPositions()[0][2] or originLane2 != userPair.roadUser2.getCurvilinearPositions()[0][2]):
573  if(originLane1 != userPair.roadUser2.getCurvilinearPositions()[0][2] or originLane2 != userPair.roadUser1.getCurvilinearPositions()[0][2]):
574  continue
575  else:
576  if(originLane1 != userPair.roadUser1.getCurvilinearPositions()[0][2]):
577  if(originLane1 != userPair.roadUser2.getCurvilinearPositions()[0][2]):
578  continue
579  elif(originLane2):
580  if(originLane2 != userPair.roadUser1.getCurvilinearPositions()[0][2]):
581  if(originLane2 != userPair.roadUser2.getCurvilinearPositions()[0][2]):
582  continue
583  # ...by destination
584  if(destLane1):
585  if(destLane2):
586  if(destLane1 != userPair.roadUser1.getCurvilinearPositions()[-1][2] or destLane2 != userPair.roadUser2.getCurvilinearPositions()[-1][2]):
587  if(destLane1 != userPair.roadUser2.getCurvilinearPositions()[-1][2] or destLane2 != userPair.roadUser1.getCurvilinearPositions()[-1][2]):
588  continue
589  else:
590  if(destLane1 != userPair.roadUser1.getCurvilinearPositions()[-1][2]):
591  if(destLane1 != userPair.roadUser2.getCurvilinearPositions()[-1][2]):
592  continue
593  elif(destLane2):
594  if(destLane2 != userPair.roadUser1.getCurvilinearPositions()[-1][2]):
595  if(destLane2 != userPair.roadUser2.getCurvilinearPositions()[-1][2]):
596  continue
597 
598 
599  pair_point_list = userPair.getPointList(**kwargs)
600  if(pair_point_list):
601 
602  if(method==1):
603  pair_indicator = tvaLib.Math.getPercentileKeyFromList(pair_point_list, percentile, sorting=True, sortingColumnIx=0)
604  if(pair_indicator[3]> minimumProbability): return_list.append(pair_indicator)
605  elif(method==3):
606  pair_indicator = tvaLib.Math.getPercentileKeyFromList(pair_point_list, percentile, sorting=True, sortingColumnIx=3)
607  if(pair_indicator[3]> minimumProbability): return_list.append(pair_indicator)
608  else:
609  for pair in pair_point_list:
610  if(pair[3] > minimumProbability):
611  return_list += [pair]
612 
613 
614  if(format == 'columns'): return tvaLib.pointList2ColumnList(return_list)
615  else: return return_list
616 
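A hedged usage sketch (not from the source); the ptype value follows the kwargs documented above, and the column indices follow the 'points' format table.

# One representative (15th-percentile) collision-point indicator per pair,
# keeping only reasonably probable points.
points = userPairs.getPointList(method=1,
                                percentile=0.15,
                                minimumProbability=0.1,
                                ptype='CP')
for point in points:
    value, px, py, prob = point[0], point[1], point[2], point[3]
    # ... aggregate or plot the (px, py) positions weighted by prob ...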

◆ importPairs()

def lib.int.UserPairs.importPairs (   self,
  paths,
  version = None,
  filename = '' 
)
Attempt to import user pairs (*.pva) from any of the given paths.

Definition at line 375 of file int.py.

375  def importPairs(self, paths, version=None, filename=''):
376  ''' Attempt to import user pairs (*.pva) from any of the given paths. '''
377  if(not filename): filename = 'userPairs('+self.methodName+').pva'
378  if(type(paths) != list): paths = [paths]
379  for path in paths:
380  if(os.path.exists(path) and os.path.isfile(os.path.join(path, filename))):
381  with open(os.path.join(path, filename), 'rb') as input_data:
382  originalVersion = pickle.load(input_data)
383  if(version and originalVersion != version): return False
384  tmp_dict = pickle.load(input_data)
385  self.__dict__.update(tmp_dict)
386  return True
387  return False
388 

◆ merge()

def lib.int.UserPairs.merge (   self,
  source 
)
Merge this set of interactions with another set. 

Definition at line 717 of file int.py.

717  def merge(self, source):
718  ''' Merge this set of interactions with another set. '''
719  self.intWTTCmap = self.intWTTCmap + [x+len(self.data) for x in source.intWTTCmap]
720  self.data += source.data
721  return True
722 
723 
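A minimal sketch (not from the source), assuming two UserPairs instances built from compatible data, e.g. two time slices of the same site.

# Fold a second set of interactions into the first, in place.
userPairsA.merge(userPairsB)
print(str(len(userPairsA))+' user pairs after merging')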

◆ repopulateObjectData()

def lib.int.UserPairs.repopulateObjectData (   self,
  objects,
  graceful = True 
)
Restore each interaction's embedded object data if the objects' hashes
    match any existing stored hashes. Alternatively, attempt matching by road
    user number (as reported by Traffic Intelligence).

    Output:
    =======
    Returns the fraction of successful repopulations (0.0 to 1.0).

Definition at line 682 of file int.py.

682  def repopulateObjectData(self, objects, graceful=True):
683  ''' Restore each interaction's embedded object data if the objects' hashes
684  match any existing stored hashes. Alternatively, attempt matching by road
685  user number (as reported by Traffic Intelligence).
686 
687  Output:
688  =======
689  Returns the fraction of successful repopulations (0.0 to 1.0).
690  '''
691  hashMap = [obj.hash for obj in objects]
692 
693  original_data_length = len(self)
694  failed_repopulations = 0
695 
696  for i in range(len(self)):
697  try:
698  if(hasattr(self[i],'user1Hash') and hasattr(self[i],'user2Hash')):
699  self[i].roadUser1 = objects[hashMap.index(self[i].user1Hash)]
700  self[i].roadUser2 = objects[hashMap.index(self[i].user2Hash)]
701  else:
702  self[i].roadUser1 = objects[tvaLib.Obj.num2ind(objects, list(self[i].roadUserNumbers)[0])]
703  self[i].roadUser2 = objects[tvaLib.Obj.num2ind(objects, list(self[i].roadUserNumbers)[1])]
704  except Exception:
705  if(graceful):
706  self.data[i] = None
707  failed_repopulations += 1
708  continue
709  else: return 0.0
710 
711  if(failed_repopulations > 0): self.data = filter(None, self.data)
712 
713  return 1 - failed_repopulations/float(original_data_length)
714 
715 
716 
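A hedged sketch of the pickling workflow implied by garbageCollectObjectData() and this method (not from the source); analysisPath and objects are assumed to exist as in the earlier examples.

# Drop embedded trajectories so the pairs pickle compactly...
userPairs.garbageCollectObjectData()
userPairs.exportPairs(analysisPath)

# ...then, in a later session, relink them once the objects are reloaded.
restored = UserPairs([], methodName='default')
restored.importPairs(analysisPath)
success = restored.repopulateObjectData(objects, graceful=True)
print('{0:.0%} of user pairs relinked to their trajectories'.format(success))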

Member Data Documentation

◆ chunk

lib.int.UserPairs.chunk

Index of the chunk currently being processed by calculateIndicators().

Definition at line 419 of file int.py.

◆ chunks

lib.int.UserPairs.chunks

Number of chunks the user pair list is split into by calculateIndicators().

Definition at line 418 of file int.py.

◆ data

lib.int.UserPairs.data

List of Interaction objects (the user pairs).

Definition at line 340 of file int.py.

◆ data_

lib.int.UserPairs.data_

Temporary storage for unmerged indicator worker results, distributed by distributeIndicatorWorkerResults().

Definition at line 420 of file int.py.

◆ detectedSize

lib.int.UserPairs.detectedSize

Number of candidate user pairs detected by genPairs().

Definition at line 313 of file int.py.

◆ interactionCount

lib.int.UserPairs.interactionCount

Running count of interactions.

Definition at line 306 of file int.py.

◆ intWTTCmap

lib.int.UserPairs.intWTTCmap

Definition at line 307 of file int.py.

◆ methodName

lib.int.UserPairs.methodName

Definition at line 312 of file int.py.

◆ prog

lib.int.UserPairs.prog

Definition at line 422 of file int.py.

◆ skipped_rate_avg

lib.int.UserPairs.skipped_rate_avg

Average skipping rate reported by the indicator workers.

Definition at line 311 of file int.py.

◆ skipped_rate_full

lib.int.UserPairs.skipped_rate_full

Definition at line 310 of file int.py.

◆ skipped_rate_none

lib.int.UserPairs.skipped_rate_none

Definition at line 308 of file int.py.

◆ skipped_rate_part

lib.int.UserPairs.skipped_rate_part

Definition at line 309 of file int.py.

◆ usedSize

lib.int.UserPairs.usedSize

Definition at line 314 of file int.py.

◆ verbose

lib.int.UserPairs.verbose

Verbosity level used during indicator calculation.

Definition at line 421 of file int.py.


The documentation for this class was generated from the following file:
int.py