# cjson.py — batch analyser for student C submissions; emits CSV summaries.
  1. from __future__ import print_function
  2. import sys
  3. import os
  4. sys.path.extend(['.', '..'])
  5. from pycparser import parse_file, c_ast
  6. from Queue import Queue
  7. from threading import Thread
  8. import re
  9. import copy
  10. import files
  11. import analyser
# Matches C line comments (//...) and block comments (/* ... */).
COMMENT_REGEX = r"(//.*)|(/\*[\w\W\n\r]*?\*/)"
# Matches text that does NOT count as a "useful" code line: comments,
# blank lines, empty brace pairs and brace-only lines.
USEFUL_REGEX = r"(//.*)|(/\*[\w\W\n\r]*?\*/)|(^\s*$)|(\{\s*\})|(^\s*\{\s*$)|(^\s*\}\s*$)"
# Column layout of data.csv: id/line-count columns, then one column per
# operator, per command kind and per declaration type.
CSV_HEADER = ['assignment_id', 'student_id', 'total_submissions', 'code_lines','total_lines','comments']
OP_HEADER = ['!', '<=', '%', '>=', '++', '+', '*', '-', '/', '<', '--', '&&', 'p++', '\'==\'', 'p--', '!=', '||', '>']
CSV_HEADER.extend(OP_HEADER)
COMMANDS = ['Return', 'For', 'FuncCall', 'Assignment', 'Switch', 'DoWhile', 'While', 'FuncDef', 'If']
CSV_HEADER.extend(COMMANDS)
DECLARATIONS = ['vector_int', 'vector_float', 'matrix_string', 'string', 'int', 'pointer_int', 'float', 'matrix_int', 'pointer_float', 'matrix_float']
CSV_HEADER.extend(DECLARATIONS)
# Column layouts for cond_structure.csv and for_structure.csv; the "for"
# variant adds two extra flag columns at the end.
COND_CSV_HEADER = ['assignment_id', 'student_id', 'commands_count', 'cond_type', 'logic_op_count', 'rel_op_count']
COND_CSV_HEADER.extend(OP_HEADER)
FOR_CSV_HEADER = copy.deepcopy(COND_CSV_HEADER)
FOR_CSV_HEADER.extend(['use_assignment','use_next_loop'])
# Shared accumulator that worker threads append analysis results to.
finalDataList = list()
  26. class Worker (Thread):
  27. """Thread executing tasks from a given tasks queue"""
  28. def __init__ (self, tasks):
  29. Thread.__init__(self)
  30. self.tasks = tasks
  31. self.daemon = True
  32. self.start()
  33. def run (self):
  34. while True:
  35. func, args, kargs = self.tasks.get()
  36. try: func(*args, **kargs)
  37. except Exception as e:
  38. print(e)
  39. self.tasks.task_done()
  40. class ThreadPool:
  41. """Pool of threads consuming tasks from a queue"""
  42. def __init__ (self, num_threads):
  43. self.tasks = Queue(num_threads)
  44. for _ in range(num_threads): Worker(self.tasks)
  45. def add_task (self, func, *args, **kargs):
  46. """Add a task to the queue"""
  47. self.tasks.put((func, args, kargs))
  48. def wait_completion (self):
  49. """Wait for completion of all the tasks in the queue"""
  50. self.tasks.join()
  51. def processFile (filename):
  52. if len(filename) == 0:
  53. return list()
  54. ast = parse_file(filename, use_cpp=True,
  55. cpp_path='gcc',
  56. cpp_args=['-E', r'-Iutils/fake_libc_include'])
  57. nodeList = [node for (_, node) in ast.children() if node.__class__.__name__ is 'FuncDef' or node.__class__.__name__ is 'Decl']
  58. return nodeList
  59. def processStudentData (studentData, assignment):
  60. try:
  61. result = processFile(studentData[1])
  62. if len(result) == 0:
  63. return
  64. data = analyser.ASTAnalyser(assignment, studentData[0], result)
  65. data.beginAnalysis()
  66. fileText = open(studentData[1], 'r').read()
  67. totalLines = fileText.count("\n")
  68. totalComments = len(re.findall(COMMENT_REGEX, fileText, re.MULTILINE))
  69. usefulText = re.sub(USEFUL_REGEX, "", fileText, flags=re.MULTILINE)
  70. usefulText = os.linesep.join([s for s in usefulText.splitlines() if s])
  71. usefulLines = usefulText.count("\n")
  72. finalDataList.append((data, studentData[2], totalLines, totalComments, usefulLines))
  73. print("Processing data from student %s at assignment %s \n Commands: %s \n Declarations:\n\tBasic:%s\n\tMatrix:%s\n\tPointers:%s\n\tVectors:%s \n Operators: %s \n Constants: %s \n For data: %s \n Condition data: %s \n Lines:%s, Comments:%s, Useful:%s" % (studentData[0], assignment, data.commandCount, data.declarations, data.declarationsMatrixes, data.declarationsPointers, data.declarationsVectors, data.operatorsCount, data.constantInitCount, data.forCommandStr(), data.conditionCommandStr(), totalLines, totalComments, usefulLines))
  74. except Exception as e:
  75. print("%s! Failed to process file: %s" % (e, studentData[1]))
  76. def loadStudentsFolders (raiz, folder):
  77. userDataPath = os.path.join(raiz, folder)
  78. return files.filesFromFolder(userDataPath, "usersdata")
  79. def loadStudentFiles (raiz, folder, studentsFolders):
  80. studentsData = []
  81. for s in studentsFolders:
  82. path = os.path.join(raiz, folder, "usersdata", s)
  83. totalSub = files.countFolders(path)
  84. finalFolder = files.highestFileName(path)
  85. cFileFolder = os.path.join(path, finalFolder, "submittedfiles")
  86. cFiles = files.getFilesInFolder(cFileFolder, "*[cC]*")
  87. if len(cFiles) == 0:
  88. studentsData.append((s, "", totalSub))
  89. else:
  90. studentsData.append((s, cFiles[0], totalSub))
  91. return studentsData
  92. def loadAssignments (raiz):
  93. assignmentsFolders = files.filesFromFolder(raiz, "")
  94. assignments = {}
  95. for a in assignmentsFolders:
  96. studentsFolders = loadStudentsFolders(raiz, a)
  97. if(len(studentsFolders) == 0):
  98. assignments[a] = []
  99. continue
  100. studentsData = loadStudentFiles(raiz, a, studentsFolders)
  101. assignments[a] = studentsData
  102. return assignments
  103. def initEmptyDict (list):
  104. result = dict()
  105. for k in list:
  106. result[k] = 0
  107. return result
  108. def saveToFile (filePath, data):
  109. file = open(filePath, "w+")
  110. file.write(data)
  111. file.close()
#--- run ---#
if __name__ == "__main__":
    if len(sys.argv) > 1:
        file = sys.argv[1]
        if file == "-f" and len(sys.argv) > 2:
            # "-f <file>": parse a single C file and print its top-level nodes.
            print(processFile(sys.argv[2]))
        elif file != "-f":
            # "<folder>": analyse every student of every assignment under it.
            raiz = "./" + sys.argv[1]
            data = loadAssignments(raiz)
            # Process submissions concurrently; workers append results to the
            # shared finalDataList.
            pool = ThreadPool(10)
            for a in data:
                for studentData in data[a]:
                    pool.add_task(processStudentData, studentData, a)
            pool.wait_completion()
            mainCSVFile = ""
            forCSVFile = ""
            condCSVFile = ""
            assignmentList = dict()
            constantInitCount = dict()
            # Group the result tuples by assignment id.
            # NOTE(review): 'data' is rebound here from the assignments dict to
            # each (analysis, total_submissions, total_lines, comments,
            # useful_lines) tuple produced by processStudentData.
            for data in finalDataList:
                if data[0].assignment in assignmentList:
                    assignmentList[data[0].assignment].append(data)
                else:
                    assignmentList[data[0].assignment] = list()
                    assignmentList[data[0].assignment].append(data)
            for assignmentKey in assignmentList:
                for studentData in assignmentList[assignmentKey]:
                    astInfo = studentData[0]
                    # Accumulate constant-initialisation counts over all students.
                    for k in astInfo.constantInitCount:
                        if k in constantInitCount:
                            constantInitCount[k] += astInfo.constantInitCount[k]
                        else:
                            constantInitCount[k] = astInfo.constantInitCount[k]
                    # Zero-filled dicts so every CSV column exists even when a
                    # student never used that operator/command/declaration.
                    studentOpData = initEmptyDict(analyser.VALID_OPS)
                    for key in astInfo.operatorsCount:
                        studentOpData[key] = astInfo.operatorsCount[key]
                    studentCommandData = initEmptyDict(COMMANDS)
                    for key in astInfo.commandCount:
                        studentCommandData[key] = astInfo.commandCount[key]
                    studentDeclarationData = initEmptyDict(DECLARATIONS)
                    for key in astInfo.declarations:
                        studentDeclarationData[key] = astInfo.declarations[key]
                    # Pointer/vector/matrix declarations get prefixed keys that
                    # must line up with the DECLARATIONS column names.
                    for key in astInfo.declarationsPointers:
                        studentDeclarationData["pointer_" + key] = astInfo.declarationsPointers[key]
                    for key in astInfo.declarationsVectors:
                        studentDeclarationData["vector_" + key] = astInfo.declarationsVectors[key]
                    for key in astInfo.declarationsMatrixes:
                        studentDeclarationData["matrix_" + key] = astInfo.declarationsMatrixes[key]
                    # One data.csv row per student.
                    # NOTE(review): column order relies on dict iteration order
                    # matching OP_HEADER/COMMANDS/DECLARATIONS — only guaranteed
                    # where dicts preserve insertion order, and assumes
                    # analyser.VALID_OPS is ordered like OP_HEADER; confirm.
                    mainCSVFile += "%s,%s,%s,%s,%s,%s" % (assignmentKey, astInfo.student, studentData[1], studentData[4], studentData[2], studentData[3])
                    mainCSVFile += "," + ','.join([str(v) for v in studentOpData.values()])
                    mainCSVFile += "," + ",".join([str(v) for v in studentCommandData.values()])
                    mainCSVFile += "," + ",".join([str(v) for v in studentDeclarationData.values()])
                    mainCSVFile += "\n"
                    #For_structure.csv
                    # One row per 'for' statement, with per-operator counts and
                    # the assignment/next-loop usage flags.
                    for i in astInfo.forCommandData:
                        forCSVFile += "%s,%s,%s,%s,%s,%s" % (assignmentKey, astInfo.student, i.cmdCount, i.condType, i.numLogicOps, i.numRelOps)
                        opData = initEmptyDict(analyser.VALID_OPS)
                        for op in i.opList:
                            opData[op] += 1
                        forCSVFile += "," + ','.join([str(v) for v in opData.values()])
                        forCSVFile += ",%s,%s\n" % (i.useAssignment, i.useNext)
                    #condition_structure.csv
                    # One row per conditional, same operator-count columns.
                    for i in astInfo.conditionCommandData:
                        condCSVFile += "%s,%s,%s,%s,%s,%s" % (assignmentKey, astInfo.student, i.cmdCount, i.condType, i.numLogicOps, i.numRelOps)
                        opData = initEmptyDict(analyser.VALID_OPS)
                        for op in i.opList:
                            opData[op] += 1
                        condCSVFile += "," + ','.join([str(v) for v in opData.values()])
                        condCSVFile += "\n"
            # Prepend headers and write the four CSV outputs.
            mainCSVFile = ','.join(CSV_HEADER) + '\n' + mainCSVFile
            saveToFile("data.csv", mainCSVFile)
            forCSVFile = ','.join(FOR_CSV_HEADER) + '\n' + forCSVFile
            saveToFile("for_structure.csv", forCSVFile)
            condCSVFile = ','.join(COND_CSV_HEADER) + '\n' + condCSVFile
            saveToFile("cond_structure.csv", condCSVFile)
            constantInitFile = "constant,count\n"
            for k in constantInitCount:
                constantInitFile += "%s,%s\n" % (k, str(constantInitCount[k]))
            saveToFile("const_init.csv", constantInitFile)
        else:
            print("cjson -f file | cjon folder/")
    else:
        print("cjson -f file | cjon folder/")